From 9b03fd1f042fc59f69c642fa58a638740feb7eca Mon Sep 17 00:00:00 2001 From: sale Date: Mon, 5 Dec 2016 08:59:07 +0000 Subject: [PATCH 01/26] Added Basic BloomFilter and BloomConcurrentMap Summary: Added Basic BloomFilter and BloomConcurrentMap Test Plan: manual Reviewers: buda Subscribers: buda Maniphest Tasks: T115 Differential Revision: https://memgraph.phacility.com/D15 --- .../bloom/basic_bloom_filter.hpp | 58 ++++ .../concurrent/concurrent_bloom_map.hpp | 36 +++ .../bloom/basic_bloom_filter.cpp | 59 ++++ .../concurrent/concurrent_bloom_map.cpp | 270 ++++++++++++++++++ tests/unit/basic_bloom_filter.cpp | 45 +++ 5 files changed, 468 insertions(+) create mode 100644 include/data_structures/bloom/basic_bloom_filter.hpp create mode 100644 include/data_structures/concurrent/concurrent_bloom_map.hpp create mode 100644 tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp create mode 100644 tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp create mode 100644 tests/unit/basic_bloom_filter.cpp diff --git a/include/data_structures/bloom/basic_bloom_filter.hpp b/include/data_structures/bloom/basic_bloom_filter.hpp new file mode 100644 index 000000000..99e26f0aa --- /dev/null +++ b/include/data_structures/bloom/basic_bloom_filter.hpp @@ -0,0 +1,58 @@ +#include +#include +#include + +template +class BasicBloomFilter { + private: + using HashFunction = std::function; + using CompresionFunction = std::function; + + std::bitset filter_; + std::vector hashes_; + CompresionFunction compression_; + std::vector buckets; + + int default_compression(uint64_t hash) { return hash % BucketSize; } + + void get_buckets(const Type& data) { + for (int i = 0; i < hashes_.size(); i++) + buckets[i] = compression_(hashes_[i](data)); + } + + void print_buckets(std::vector& buckets) { + for (int i = 0; i < buckets.size(); i++) { + std::cout << buckets[i] << " "; + } + std::cout << std::endl; + } + + public: + BasicBloomFilter(std::vector funcs, + CompresionFunction compression = {}) + : hashes_(funcs) { + if (!compression) + compression_ = std::bind(&BasicBloomFilter::default_compression, this, + std::placeholders::_1); + else + compression_ = compression; + + buckets.resize(hashes_.size()); + } + + bool contains(const Type& data) { + get_buckets(data); + bool contains_element = true; + + for (int i = 0; i < buckets.size(); i++) + contains_element &= filter_[buckets[i]]; + + return contains_element; + } + + void insert(const Type& data) { + get_buckets(data); + + for (int i = 0; i < buckets.size(); i++) filter_[buckets[i]] = true; + } +}; diff --git a/include/data_structures/concurrent/concurrent_bloom_map.hpp b/include/data_structures/concurrent/concurrent_bloom_map.hpp new file mode 100644 index 000000000..8ffca031e --- /dev/null +++ b/include/data_structures/concurrent/concurrent_bloom_map.hpp @@ -0,0 +1,36 @@ +#pragma once + +#include "data_structures/concurrent/common.hpp" +#include "data_structures/concurrent/skiplist.hpp" +#include "data_structures/concurrent/concurrent_map.hpp" + + +using std::pair; + +template +class ConcurrentBloomMap { + using item_t = Item; + using list_it = typename SkipList::Iterator; + + private: + ConcurrentMap map_; + BloomFilter filter_; + + public: + ConcurrentBloomMap(BloomFilter filter) : filter_(filter) {} + + std::pair insert(const Key &key, const Value &data) { + filter_.insert(key); + + auto accessor = std::move(map_.access()); + + return accessor.insert(key, data); + } + + bool contains(const Key &key) { + if (!filter_.contains(key)) return false; + + 
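+    // A Bloom filter can give false positives but never false negatives:
+    // the filter miss above proves the key is absent, while a filter hit
+    // still has to be confirmed against the underlying concurrent map.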
auto accessor = map_.access(); + return accessor.contains(key); + } +}; diff --git a/tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp b/tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp new file mode 100644 index 000000000..231993fcb --- /dev/null +++ b/tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp @@ -0,0 +1,59 @@ +#include +#include + +#include "data_structures/bloom/basic_bloom_filter.hpp" +#include "logging/default.hpp" +#include "logging/streams/stdout.hpp" +#include "utils/command_line/arguments.hpp" +#include "utils/hashing/fnv64.hpp" +#include "utils/random/generator.h" + +#include "benchmark/benchmark_api.h" + +using utils::random::StringGenerator; +using StringHashFunction = std::function; + +template +static void TestBloom(benchmark::State& state, BasicBloomFilter* +bloom, const std::vector& elements) { + while(state.KeepRunning()) { + for (int start = 0; start < state.range(0); start++) + if (start % 2) bloom->contains(elements[start]); + else bloom->insert(elements[start]); + } + state.SetComplexityN(state.range(0)); +} + +auto BM_Bloom = [](benchmark::State& state, auto* bloom, const auto& elements) { + TestBloom(state, bloom, elements); +}; + +void parse_args(int argc, char** argv) {} + +int main(int argc, char** argv) { + logging::init_async(); + logging::log->pipe(std::make_unique()); + + parse_args(argc, argv); + + StringGenerator generator(4); + + auto elements = utils::random::generate_vector(generator, 1 << 16); + + StringHashFunction hash1 = fnv64; + StringHashFunction hash2 = fnv1a64; + std::vector funcs = { + hash1, hash2 + }; + + BasicBloomFilter bloom(funcs); + + benchmark::RegisterBenchmark("SimpleBloomFilter Benchmark Test", BM_Bloom, + &bloom, elements) + ->RangeMultiplier(2) + ->Range(1, 1 << 16) + ->Complexity(benchmark::oN); + + benchmark::Initialize(&argc, argv); + benchmark::RunSpecifiedBenchmarks(); +} diff --git a/tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp b/tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp new file mode 100644 index 000000000..17df96bb5 --- /dev/null +++ b/tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp @@ -0,0 +1,270 @@ +#include +#include + +#include "data_structures/bloom/basic_bloom_filter.hpp" +#include "data_structures/concurrent/concurrent_bloom_map.hpp" +#include "logging/default.hpp" +#include "logging/streams/stdout.hpp" +#include "utils/command_line/arguments.hpp" +#include "utils/hashing/fnv64.hpp" +#include "utils/random/generator.h" + +#include "benchmark/benchmark_api.h" + +/* + ConcurrentMap Benchmark Test: + - tests time of Insertion, Contain and Delete operations + + - benchmarking time per operation + + - test run ConcurrentMap with the following keys and values: + - + - + - + - +*/ + +using utils::random::NumberGenerator; +using utils::random::PairGenerator; +using utils::random::StringGenerator; +using StringHashFunction = std::function; + +using IntegerGenerator = NumberGenerator, + std::default_random_engine, int>; + +// Global arguments +int MAX_ELEMENTS = 1 << 18, MULTIPLIER = 2; +int THREADS, RANGE_START, RANGE_END, STRING_LENGTH; + +/* + ConcurrentMap Insertion Benchmark Test +*/ +template +static void InsertValue(benchmark::State& state, ConcurrentBloomMap* map, + const std::vector>& elements) { + while (state.KeepRunning()) { + for (int start = 0; start < state.range(0); start++) { + map->insert(elements[start].first, elements[start].second); + } + } + state.SetComplexityN(state.range(0)); +} + +/* + 
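  (disabled: a Bloom filter cannot un-set bits once they are written, so
  ConcurrentBloomMap exposes no remove() and deletion cannot be benchmarked)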
ConcurrentMap Deletion Benchmark Test +template +static void DeleteValue(benchmark::State& state, ConcurrentBloomMap* map, + const std::vector> elements) { + while (state.KeepRunning()) { + auto accessor = map->access(); + for (int start = 0; start < state.range(0); start++) { + accessor.remove(elements[start].first); + } + } + state.SetComplexityN(state.range(0)); +} +*/ + +/* + ConcurrentMap Contains Benchmark Test +*/ +template +static void ContainsValue(benchmark::State& state, ConcurrentBloomMap* map, + const std::vector> elements) { + while (state.KeepRunning()) { + for (int start = 0; start < state.range(0); start++) { + map->contains(elements[start].first); + } + } + state.SetComplexityN(state.range(0)); +} + +auto BM_InsertValue = [](benchmark::State& state, auto* map, auto& elements) { + InsertValue(state, map, elements); +}; + +/* +auto BM_DeleteValue = [](benchmark::State& state, auto* map, auto elements) { + DeleteValue(state, map, elements); +}; +*/ + +auto BM_ContainsValue = [](benchmark::State& state, auto* map, auto elements) { + ContainsValue(state, map, elements); +}; + +/* + Commandline Argument Parsing + + Arguments: + * Integer Range Minimum + -start number + + * Integer Range Maximum + - end number + + * Number of threads + - threads number + + * Random String lenght + -string-length number +*/ +void parse_arguments(int argc, char** argv) { + REGISTER_ARGS(argc, argv); + + RANGE_START = GET_ARG("-start", "0").get_int(); + RANGE_END = GET_ARG("-end", "1000000000").get_int(); + + THREADS = std::min(GET_ARG("-threads", "1").get_int(), + (int)std::thread::hardware_concurrency()); + + STRING_LENGTH = + ProgramArguments::instance().get_arg("-string-length", "128").get_int(); +} + +int main(int argc, char** argv) { + logging::init_async(); + logging::log->pipe(std::make_unique()); + + parse_arguments(argc, argv); + + StringGenerator sg(STRING_LENGTH); + IntegerGenerator ig(RANGE_START, RANGE_END); + + /* + Creates RandomGenerators, ConcurentMaps and Random Element Vectors for the + following use cases: + + Map elements contain keys and value for: + , + + + + */ + + // random generators for tests + PairGenerator piig(&ig, &ig); + PairGenerator pssg(&sg, &sg); + PairGenerator psig(&sg, &ig); + PairGenerator pisg(&ig, &sg); + + StringHashFunction hash1 = fnv64; + StringHashFunction hash2 = fnv1a64; + std::vector funcs = { + hash1, hash2 + }; + + BasicBloomFilter bloom_filter_(funcs); + + // maps used for testing + //ConcurrentBloomMap ii_map; + //ConcurrentBloomMap is_map; + using Filter = BasicBloomFilter; + ConcurrentBloomMap si_map(bloom_filter_); + ConcurrentBloomMap +ss_map(bloom_filter_); + + // random elements for testing + //auto ii_elems = utils::random::generate_vector(piig, MAX_ELEMENTS); + //auto is_elems = utils::random::generate_vector(pisg, MAX_ELEMENTS); + auto si_elems = utils::random::generate_vector(psig, MAX_ELEMENTS); + auto ss_elems = utils::random::generate_vector(pssg, MAX_ELEMENTS); + + /* insertion Tests */ + /* + benchmark::RegisterBenchmark("InsertValue[Int, Int]", BM_InsertValue, &ii_map, + ii_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + benchmark::RegisterBenchmark("InsertValue[Int, String]", BM_InsertValue, + &is_map, is_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + */ + benchmark::RegisterBenchmark("InsertValue[String, Int]", BM_InsertValue, + &si_map, si_elems) + ->RangeMultiplier(MULTIPLIER) + 
->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + benchmark::RegisterBenchmark("InsertValue[String, String]", BM_InsertValue, + &ss_map, ss_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + // Contains Benchmark Tests + + /* + benchmark::RegisterBenchmark("ContainsValue[Int, Int]", BM_ContainsValue, + &ii_map, ii_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + benchmark::RegisterBenchmark("ContainsValue[Int, String]", BM_ContainsValue, + &is_map, is_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + */ + benchmark::RegisterBenchmark("ContainsValue[String, Int]", BM_ContainsValue, + &si_map, si_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + benchmark::RegisterBenchmark("ContainsValue[String, String]", + BM_ContainsValue, &ss_map, ss_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + // Deletion Banchamark Tests + /* + + benchmark::RegisterBenchmark("DeleteValue[Int, Int]", BM_DeleteValue, &ii_map, + ii_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + benchmark::RegisterBenchmark("DeleteValue[Int, String]", BM_DeleteValue, + &is_map, is_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + benchmark::RegisterBenchmark("DeleteValue[String, Int]", BM_DeleteValue, + &si_map, si_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + + benchmark::RegisterBenchmark("DeleteValue[String, String]", BM_DeleteValue, + &ss_map, ss_elems) + ->RangeMultiplier(MULTIPLIER) + ->Range(1, MAX_ELEMENTS) + ->Complexity(benchmark::oN) + ->Threads(THREADS); + */ + + benchmark::Initialize(&argc, argv); + benchmark::RunSpecifiedBenchmarks(); + + return 0; +} diff --git a/tests/unit/basic_bloom_filter.cpp b/tests/unit/basic_bloom_filter.cpp new file mode 100644 index 000000000..77484ca08 --- /dev/null +++ b/tests/unit/basic_bloom_filter.cpp @@ -0,0 +1,45 @@ +#define CATCH_CONFIG_MAIN +#include "catch.hpp" + +#include "utils/command_line/arguments.hpp" +#include "utils/hashing/fnv64.hpp" + +#include "data_structures/bloom/basic_bloom_filter.hpp" + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wwritable-strings" + +using StringHashFunction = std::function; + +TEST_CASE("BasicBloomFilter Test") { + StringHashFunction hash1 = fnv64; + StringHashFunction hash2 = fnv1a64; + + auto c = [](auto x) -> int { + return x % 4; + } ; + std::vector funcs = { + hash1, hash2 + }; + + BasicBloomFilter bloom(funcs); + + std::string test = "test"; + std::string kifla = "pizda"; + + std::cout << hash1(test) << std::endl; + std::cout << hash2(test) << std::endl; + + std::cout << hash1(kifla) << std::endl; + std::cout << hash2(kifla) << std::endl; + + std::cout << bloom.contains(test) << std::endl; + bloom.insert(test); + std::cout << bloom.contains(test) << std::endl; + + std::cout << bloom.contains(kifla) << std::endl; + bloom.insert(kifla); + std::cout << bloom.contains(kifla) << std::endl; +} + +#pragma clang diagnostic pop From 52c5159bc0ffa4aaa0b3f062b0fdc360dd1382bd Mon Sep 17 00:00:00 2001 From: sale Date: Wed, 7 Dec 2016 
13:20:53 +0000 Subject: [PATCH 02/26] Bloom filter code review changes --- ...asic_bloom_filter.hpp => bloom_filter.hpp} | 19 ++-- .../bloom/basic_bloom_filter.cpp | 6 +- .../concurrent/concurrent_bloom_map.cpp | 90 +------------------ tests/unit/basic_bloom_filter.cpp | 8 +- 4 files changed, 24 insertions(+), 99 deletions(-) rename include/data_structures/bloom/{basic_bloom_filter.hpp => bloom_filter.hpp} (75%) diff --git a/include/data_structures/bloom/basic_bloom_filter.hpp b/include/data_structures/bloom/bloom_filter.hpp similarity index 75% rename from include/data_structures/bloom/basic_bloom_filter.hpp rename to include/data_structures/bloom/bloom_filter.hpp index 99e26f0aa..33da0df80 100644 --- a/include/data_structures/bloom/basic_bloom_filter.hpp +++ b/include/data_structures/bloom/bloom_filter.hpp @@ -2,8 +2,17 @@ #include #include +/* + Implementation of a generic Bloom Filter. + + Read more about bloom filters here: + http://en.wikipedia.org/wiki/Bloom_filter + http://www.jasondavies.com/bloomfilter/ +*/ + +// Type specifies the type of data stored template -class BasicBloomFilter { +class BloomFilter { private: using HashFunction = std::function; using CompresionFunction = std::function; @@ -28,13 +37,13 @@ class BasicBloomFilter { } public: - BasicBloomFilter(std::vector funcs, - CompresionFunction compression = {}) + BloomFilter(std::vector funcs, + CompresionFunction compression = {}) : hashes_(funcs) { if (!compression) - compression_ = std::bind(&BasicBloomFilter::default_compression, this, + compression_ = std::bind(&BloomFilter::default_compression, this, std::placeholders::_1); - else + else compression_ = compression; buckets.resize(hashes_.size()); diff --git a/tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp b/tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp index 231993fcb..36a74506d 100644 --- a/tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp +++ b/tests/benchmark/data_structures/bloom/basic_bloom_filter.cpp @@ -1,7 +1,7 @@ #include #include -#include "data_structures/bloom/basic_bloom_filter.hpp" +#include "data_structures/bloom/bloom_filter.hpp" #include "logging/default.hpp" #include "logging/streams/stdout.hpp" #include "utils/command_line/arguments.hpp" @@ -14,7 +14,7 @@ using utils::random::StringGenerator; using StringHashFunction = std::function; template -static void TestBloom(benchmark::State& state, BasicBloomFilter* +static void TestBloom(benchmark::State& state, BloomFilter* bloom, const std::vector& elements) { while(state.KeepRunning()) { for (int start = 0; start < state.range(0); start++) @@ -46,7 +46,7 @@ int main(int argc, char** argv) { hash1, hash2 }; - BasicBloomFilter bloom(funcs); + BloomFilter bloom(funcs); benchmark::RegisterBenchmark("SimpleBloomFilter Benchmark Test", BM_Bloom, &bloom, elements) diff --git a/tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp b/tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp index 17df96bb5..f305d8b20 100644 --- a/tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp +++ b/tests/benchmark/data_structures/concurrent/concurrent_bloom_map.cpp @@ -1,7 +1,7 @@ #include #include -#include "data_structures/bloom/basic_bloom_filter.hpp" +#include "data_structures/bloom/bloom_filter.hpp" #include "data_structures/concurrent/concurrent_bloom_map.hpp" #include "logging/default.hpp" #include "logging/streams/stdout.hpp" @@ -50,21 +50,6 @@ static void InsertValue(benchmark::State& state, ConcurrentBloomMap* ma 
state.SetComplexityN(state.range(0)); } -/* - ConcurrentMap Deletion Benchmark Test -template -static void DeleteValue(benchmark::State& state, ConcurrentBloomMap* map, - const std::vector> elements) { - while (state.KeepRunning()) { - auto accessor = map->access(); - for (int start = 0; start < state.range(0); start++) { - accessor.remove(elements[start].first); - } - } - state.SetComplexityN(state.range(0)); -} -*/ - /* ConcurrentMap Contains Benchmark Test */ @@ -83,12 +68,6 @@ auto BM_InsertValue = [](benchmark::State& state, auto* map, auto& elements) { InsertValue(state, map, elements); }; -/* -auto BM_DeleteValue = [](benchmark::State& state, auto* map, auto elements) { - DeleteValue(state, map, elements); -}; -*/ - auto BM_ContainsValue = [](benchmark::State& state, auto* map, auto elements) { ContainsValue(state, map, elements); }; @@ -154,12 +133,12 @@ int main(int argc, char** argv) { hash1, hash2 }; - BasicBloomFilter bloom_filter_(funcs); + BloomFilter bloom_filter_(funcs); // maps used for testing //ConcurrentBloomMap ii_map; //ConcurrentBloomMap is_map; - using Filter = BasicBloomFilter; + using Filter = BloomFilter; ConcurrentBloomMap si_map(bloom_filter_); ConcurrentBloomMap ss_map(bloom_filter_); @@ -171,21 +150,6 @@ ss_map(bloom_filter_); auto ss_elems = utils::random::generate_vector(pssg, MAX_ELEMENTS); /* insertion Tests */ - /* - benchmark::RegisterBenchmark("InsertValue[Int, Int]", BM_InsertValue, &ii_map, - ii_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - - benchmark::RegisterBenchmark("InsertValue[Int, String]", BM_InsertValue, - &is_map, is_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - */ benchmark::RegisterBenchmark("InsertValue[String, Int]", BM_InsertValue, &si_map, si_elems) ->RangeMultiplier(MULTIPLIER) @@ -201,22 +165,6 @@ ss_map(bloom_filter_); ->Threads(THREADS); // Contains Benchmark Tests - - /* - benchmark::RegisterBenchmark("ContainsValue[Int, Int]", BM_ContainsValue, - &ii_map, ii_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - - benchmark::RegisterBenchmark("ContainsValue[Int, String]", BM_ContainsValue, - &is_map, is_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - */ benchmark::RegisterBenchmark("ContainsValue[String, Int]", BM_ContainsValue, &si_map, si_elems) ->RangeMultiplier(MULTIPLIER) @@ -231,38 +179,6 @@ ss_map(bloom_filter_); ->Complexity(benchmark::oN) ->Threads(THREADS); - // Deletion Banchamark Tests - /* - - benchmark::RegisterBenchmark("DeleteValue[Int, Int]", BM_DeleteValue, &ii_map, - ii_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - - benchmark::RegisterBenchmark("DeleteValue[Int, String]", BM_DeleteValue, - &is_map, is_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - - benchmark::RegisterBenchmark("DeleteValue[String, Int]", BM_DeleteValue, - &si_map, si_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - - benchmark::RegisterBenchmark("DeleteValue[String, String]", BM_DeleteValue, - &ss_map, ss_elems) - ->RangeMultiplier(MULTIPLIER) - ->Range(1, MAX_ELEMENTS) - ->Complexity(benchmark::oN) - ->Threads(THREADS); - */ - 
benchmark::Initialize(&argc, argv); benchmark::RunSpecifiedBenchmarks(); diff --git a/tests/unit/basic_bloom_filter.cpp b/tests/unit/basic_bloom_filter.cpp index 77484ca08..ac4df7fc2 100644 --- a/tests/unit/basic_bloom_filter.cpp +++ b/tests/unit/basic_bloom_filter.cpp @@ -4,14 +4,14 @@ #include "utils/command_line/arguments.hpp" #include "utils/hashing/fnv64.hpp" -#include "data_structures/bloom/basic_bloom_filter.hpp" +#include "data_structures/bloom/bloom_filter.hpp" #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wwritable-strings" using StringHashFunction = std::function; -TEST_CASE("BasicBloomFilter Test") { +TEST_CASE("BloomFilter Test") { StringHashFunction hash1 = fnv64; StringHashFunction hash2 = fnv1a64; @@ -22,10 +22,10 @@ TEST_CASE("BasicBloomFilter Test") { hash1, hash2 }; - BasicBloomFilter bloom(funcs); + BloomFilter bloom(funcs); std::string test = "test"; - std::string kifla = "pizda"; + std::string kifla = "kifla"; std::cout << hash1(test) << std::endl; std::cout << hash2(test) << std::endl; From 7b3c4c270e6a434e248757eb60e57606e746bc27 Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Wed, 14 Dec 2016 10:27:41 +0100 Subject: [PATCH 03/26] work in progress; TODO: change commit message --- cmake/copy_includes.cmake | 5 +- include/query/engine.hpp | 15 ++ include/query/plan/compiler.hpp | 8 +- include/query/plan/program_loader.hpp | 13 ++ include/storage/edge_x_vertex.hpp | 2 - .../utils/exceptions/not_yet_implemented.hpp | 2 + poc/queries/astar.hpp | 20 +- release/alpha.sh | 4 + .../core/{dressipi.txt => dressipi_basic.txt} | 0 tests/data/queries/core/dressipi_graph.txt | 70 +++++++ .../queries/core/{basic.txt => mg_basic.txt} | 0 tests/data/queries/core/pool.txt | 2 - .../integration/_hardcoded_query/dressipi.hpp | 4 + .../hardcoded_query/11856262817829095719.cpp | 188 ++++++++++++++++++ tests/integration/queries.cpp | 7 +- .../stream/print_record_stream.hpp | 140 +++++++++++++ 16 files changed, 464 insertions(+), 16 deletions(-) rename tests/data/queries/core/{dressipi.txt => dressipi_basic.txt} (100%) create mode 100644 tests/data/queries/core/dressipi_graph.txt rename tests/data/queries/core/{basic.txt => mg_basic.txt} (100%) delete mode 100644 tests/data/queries/core/pool.txt create mode 100644 tests/integration/hardcoded_query/11856262817829095719.cpp create mode 100644 tests/integration/stream/print_record_stream.hpp diff --git a/cmake/copy_includes.cmake b/cmake/copy_includes.cmake index 7835717de..4b0ad5a4b 100644 --- a/cmake/copy_includes.cmake +++ b/cmake/copy_includes.cmake @@ -132,7 +132,6 @@ FILE(COPY ${include_dir}/utils/counters/atomic_counter.hpp DESTINATION ${build_i FILE(COPY ${include_dir}/utils/counters/simple_counter.hpp DESTINATION ${build_include_dir}/utils/counters) FILE(COPY ${include_dir}/utils/random/fast_binomial.hpp DESTINATION ${build_include_dir}/utils/random) FILE(COPY ${include_dir}/utils/random/xorshift128plus.hpp DESTINATION ${build_include_dir}/utils/random) -FILE(COPY ${include_dir}/utils/exceptions/basic_exception.hpp DESTINATION ${build_include_dir}/utils/exceptions) FILE(COPY ${include_dir}/utils/datetime/timestamp.hpp DESTINATION ${build_include_dir}/utils/datetime) FILE(COPY ${include_dir}/utils/datetime/datetime_error.hpp DESTINATION ${build_include_dir}/utils/datetime) FILE(COPY ${include_dir}/utils/types/byte.hpp DESTINATION ${build_include_dir}/utils/types) @@ -141,6 +140,10 @@ FILE(COPY ${include_dir}/utils/option.hpp DESTINATION ${build_include_dir}/utils FILE(COPY ${include_dir}/utils/border.hpp 
DESTINATION ${build_include_dir}/utils) FILE(COPY ${include_dir}/utils/order.hpp DESTINATION ${build_include_dir}/utils) FILE(COPY ${include_dir}/utils/numerics/saturate.hpp DESTINATION ${build_include_dir}/utils/numerics) +FILE(COPY ${include_dir}/utils/memory/stack_allocator.hpp DESTINATION ${build_include_dir}/utils/memory) +FILE(COPY ${include_dir}/utils/memory/block_allocator.hpp DESTINATION ${build_include_dir}/utils/memory) +FILE(COPY ${include_dir}/utils/exceptions/basic_exception.hpp DESTINATION ${build_include_dir}/utils/exceptions) +FILE(COPY ${include_dir}/utils/exceptions/out_of_memory.hpp DESTINATION ${build_include_dir}/utils/exceptions) FILE(COPY ${include_dir}/utils/iterator/iterator_base.hpp DESTINATION ${build_include_dir}/utils/iterator) FILE(COPY ${include_dir}/utils/iterator/virtual_iter.hpp DESTINATION ${build_include_dir}/utils/iterator) diff --git a/include/query/engine.hpp b/include/query/engine.hpp index 1e6dabb88..c506f4c7e 100644 --- a/include/query/engine.hpp +++ b/include/query/engine.hpp @@ -1,5 +1,7 @@ #pragma once +#include + #include "database/db.hpp" #include "logging/default.hpp" #include "query/exception/query_engine.hpp" @@ -13,6 +15,8 @@ * -> [code_compiler] -> code_executor */ +namespace fs = std::experimental::filesystem; + // query engine has to be aware of the Stream because Stream // is passed to the dynamic shared library template @@ -44,6 +48,17 @@ public: } } + // preload functionality + auto load(const uint64_t hash, const fs::path& path) + { + program_loader.load(hash, path); + } + + auto load(const std::string& query) + { + program_loader.load(query); + } + protected: Logger logger; diff --git a/include/query/plan/compiler.hpp b/include/query/plan/compiler.hpp index f63983504..6cc2fb8fb 100644 --- a/include/query/plan/compiler.hpp +++ b/include/query/plan/compiler.hpp @@ -20,9 +20,6 @@ public: std::string flags; // TODO: sync this with cmake configuration -#ifdef BARRIER - flags += " -DBARRIER"; -#endif #ifdef NDEBUG flags += " -DNDEBUG -O2"; #endif @@ -53,9 +50,6 @@ public: "-I../include", "-I../libs/fmt", // TODO: load from config "-I../../libs/fmt", "-L./ -L../", -#ifdef BARRIER - "-lbarrier_pic", -#endif "-lmemgraph_pic", "-shared -fPIC" // shared library flags ); @@ -67,6 +61,8 @@ public: // if compilation has failed throw exception if (compile_status == -1) { + logger.debug("FAIL: Query Code Compilation: {} -> {}", in_file, + out_file); throw PlanCompilationException( "Code compilation error. 
Generated code is not compilable or " "compilation settings are wrong"); diff --git a/include/query/plan/program_loader.hpp b/include/query/plan/program_loader.hpp index 7e5e77530..8b5182f73 100644 --- a/include/query/plan/program_loader.hpp +++ b/include/query/plan/program_loader.hpp @@ -3,6 +3,7 @@ #include #include #include +#include #include "config/config.hpp" #include "logging/default.hpp" @@ -16,6 +17,8 @@ #include "utils/file.hpp" #include "utils/hashing/fnv.hpp" +namespace fs = std::experimental::filesystem; + template class ProgramLoader { @@ -26,6 +29,16 @@ public: ProgramLoader() : logger(logging::log->logger("PlanLoader")) {} + // TODO: decouple load(query) method + + auto load(const uint64_t hash, const fs::path &path) + { + // TODO: get lib path (that same folder as path folder or from config) + // TODO: compile + // TODO: dispose the old lib + // TODO: store the compiled lib + } + auto load(const std::string &query) { auto preprocessed = preprocessor.preprocess(query); diff --git a/include/storage/edge_x_vertex.hpp b/include/storage/edge_x_vertex.hpp index 0bdde5d30..382fe4a0c 100644 --- a/include/storage/edge_x_vertex.hpp +++ b/include/storage/edge_x_vertex.hpp @@ -8,8 +8,6 @@ auto VertexAccessor::out() const { DbTransaction &t = this->db; - std::cout << "VA OUT" << std::endl; - std::cout << record->data.out.size() << std::endl; return iter::make_map(iter::make_iter_ref(record->data.out), [&](auto e) -> auto { return EdgeAccessor(*e, t); }); } diff --git a/include/utils/exceptions/not_yet_implemented.hpp b/include/utils/exceptions/not_yet_implemented.hpp index b9b4b563b..e4854287d 100644 --- a/include/utils/exceptions/not_yet_implemented.hpp +++ b/include/utils/exceptions/not_yet_implemented.hpp @@ -6,4 +6,6 @@ class NotYetImplemented : public BasicException { public: using BasicException::BasicException; + + NotYetImplemented() : BasicException("") {} }; diff --git a/poc/queries/astar.hpp b/poc/queries/astar.hpp index 4e6a04fc8..88f5fe4f9 100644 --- a/poc/queries/astar.hpp +++ b/poc/queries/astar.hpp @@ -14,9 +14,13 @@ using std::endl; // Dressipi astar query of 4 clicks. +// TODO: push down appropriate using Stream = std::ostream; +// TODO: figure out from the pattern in a query constexpr size_t max_depth = 3; + +// TODO: from query LIMIT 10 constexpr size_t limit = 10; class Node @@ -79,10 +83,12 @@ void astar(DbAccessor &t, plan_args_t &args, Stream &stream) auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; }; std::priority_queue, decltype(cmp)> queue(cmp); + // TODO: internal id independent auto start_vr = t.vertex_find(Id(args[0].as().value())); if (!start_vr.is_present()) { - // stream.write_failure({{}}); + // TODO: stream failure + return; } @@ -96,15 +102,19 @@ void astar(DbAccessor &t, plan_args_t &args, Stream &stream) auto now = queue.top(); queue.pop(); - if (max_depth <= now->depth) + if (now->depth >= max_depth) { - // stream.write_success_empty(); - // best.push_back(now); + // TODO: stream the result + count++; + if (count >= limit) { + // the limit was reached -> STOP the execution break; } + + // if the limit wasn't reached -> POP the next vertex continue; } @@ -130,6 +140,8 @@ public: { DbAccessor t(db); + // TODO: find node + astar(t, args, stream); return t.commit(); diff --git a/release/alpha.sh b/release/alpha.sh index 928c39ffe..c9d99041d 100755 --- a/release/alpha.sh +++ b/release/alpha.sh @@ -2,6 +2,8 @@ # Initial version of script that is going to be used for release build. 
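+# (run it from the build directory -- the copy commands below use relative
+# paths such as ../release, ../tests and ../config)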
+# NOTE: do not run this script as a super user + # TODO: enable options related to lib echo "Memgraph Release Building..." @@ -33,12 +35,14 @@ mkdir -p ../release/${exe_name} # copy all relevant files cp ${exe_name} ../release/${exe_name}/memgraph cp libmemgraph_pic.a ../release/${exe_name}/libmemgraph_pic.a +rm -rf ../release/${exe_name}/include cp -r include ../release/${exe_name}/include cp -r template ../release/${exe_name}/template cp -r ../config ../release/${exe_name}/config # create compiled folder and copy hard coded queries mkdir -p ../release/${exe_name}/compiled/cpu/hardcode +rm -rf ../release/${exe_name}/compiled/cpu/hardcode/* cp ../tests/integration/hardcoded_query/*.cpp ../release/${exe_name}/compiled/cpu/hardcode cp ../tests/integration/hardcoded_query/*.hpp ../release/${exe_name}/compiled/cpu/hardcode diff --git a/tests/data/queries/core/dressipi.txt b/tests/data/queries/core/dressipi_basic.txt similarity index 100% rename from tests/data/queries/core/dressipi.txt rename to tests/data/queries/core/dressipi_basic.txt diff --git a/tests/data/queries/core/dressipi_graph.txt b/tests/data/queries/core/dressipi_graph.txt new file mode 100644 index 000000000..9077a9411 --- /dev/null +++ b/tests/data/queries/core/dressipi_graph.txt @@ -0,0 +1,70 @@ +CREATE (g:garment {garment_id: 1234, garment_category_id: 1, conceals: 30}) RETURN g +MATCH(g:garment {garment_id: 1234}) SET g:AA RETURN g +MATCH(g:garment {garment_id: 1234}) SET g:BB RETURN g +MATCH(g:garment {garment_id: 1234}) SET g:EE RETURN g +CREATE (g:garment {garment_id: 2345, garment_category_id: 6, reveals: 10}) RETURN g +MATCH(g:garment {garment_id: 2345}) SET g:CC RETURN g +MATCH(g:garment {garment_id: 2345}) SET g:DD RETURN g +CREATE (g:garment {garment_id: 3456, garment_category_id: 8}) RETURN g +MATCH(g:garment {garment_id: 3456}) SET g:CC RETURN g +MATCH(g:garment {garment_id: 3456}) SET g:DD RETURN g +CREATE (g:garment {garment_id: 4567, garment_category_id: 15}) RETURN g +MATCH(g:garment {garment_id: 4567}) SET g:AA RETURN g +MATCH(g:garment {garment_id: 4567}) SET g:BB RETURN g +MATCH(g:garment {garment_id: 4567}) SET g:DD RETURN g +CREATE (g:garment {garment_id: 5678, garment_category_id: 19}) RETURN g +MATCH(g:garment {garment_id: 5678}) SET g:BB RETURN g +MATCH(g:garment {garment_id: 5678}) SET g:CC RETURN g +MATCH(g:garment {garment_id: 5678}) SET g:EE RETURN g +CREATE (g:garment {garment_id: 6789, garment_category_id: 3}) RETURN g +MATCH(g:garment {garment_id: 6789}) SET g:AA RETURN g +MATCH(g:garment {garment_id: 6789}) SET g:DD RETURN g +MATCH(g:garment {garment_id: 6789}) SET g:EE RETURN g +CREATE (g:garment {garment_id: 7890, garment_category_id: 25}) RETURN g +MATCH(g:garment {garment_id: 7890}) SET g:AA RETURN g +MATCH(g:garment {garment_id: 7890}) SET g:BB RETURN g +MATCH(g:garment {garment_id: 7890}) SET g:CC RETURN g +MATCH(g:garment {garment_id: 7890}) SET g:EE RETURN g + +MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 4567}), 
(g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 6789}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 5678}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 3456}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r +MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r + +CREATE (p:profile {profile_id: 111, partner_id: 55, reveals: 30}) RETURN p +CREATE (p:profile {profile_id: 112, partner_id: 55}) RETURN p +CREATE (p:profile {profile_id: 112, partner_id: 77, conceals: 10}) RETURN p + +MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s +MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 2345}) CREATE (p)-[s:score]->(g) SET s.score=1200 RETURN s +MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 3456}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s +MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 4567}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s +MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s +MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1800 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=2000 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 4567}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 5678}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1600 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1900 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 2345}) CREATE (p)-[s:score]->(g) SET s.score=1300 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 77}), 
(g:garment {garment_id: 3456}) CREATE (p)-[s:score]->(g) SET s.score=1300 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 5678}) CREATE (p)-[s:score]->(g) SET s.score=1200 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1700 RETURN s +MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1900 RETURN s + +MATCH (a:garment)-[:default_outfit]-(b:garment)-[:default_outfit]-(c:garment)-[:default_outfit]-(d:garment)-[:default_outfit]-(a:garment)-[:default_outfit]-(c:garment), (b:garment)-[:default_outfit]-(d:garment) WHERE a.garment_id = 1234 RETURN a.garment_id, b.garment_id, c.garment_id, d.garment_id ORDER BY (a.score + b.score + c.score + d.score) DESC LIMIT 10 diff --git a/tests/data/queries/core/basic.txt b/tests/data/queries/core/mg_basic.txt similarity index 100% rename from tests/data/queries/core/basic.txt rename to tests/data/queries/core/mg_basic.txt diff --git a/tests/data/queries/core/pool.txt b/tests/data/queries/core/pool.txt deleted file mode 100644 index 71f8fd00b..000000000 --- a/tests/data/queries/core/pool.txt +++ /dev/null @@ -1,2 +0,0 @@ -MERGE (g1:garment {garment_id: 1234})-[r:default_outfit]-(g2:garment {garment_id: 2345}) RETURN r -MATCH (p:profile {profile_id: 111, partner_id: 55})-[s:score]-(g.garment {garment_id: 1234}) DELETE s diff --git a/tests/integration/_hardcoded_query/dressipi.hpp b/tests/integration/_hardcoded_query/dressipi.hpp index 3f7d1d6b3..63e7023e4 100644 --- a/tests/integration/_hardcoded_query/dressipi.hpp +++ b/tests/integration/_hardcoded_query/dressipi.hpp @@ -633,6 +633,10 @@ auto load_dressipi_functions(Db &db) return t.commit(); }; + // Query: MATCH (a:garment)-[:default_outfit]-(b:garment)-[:default_outfit]-(c:garment)-[:default_outfit]-(d:garment)-[:default_outfit]-(a:garment)-[:default_outfit]-(c:garment), (b:garment)-[:default_outfit]-(d:garment) WHERE a.garment_id = 1234 RETURN a.garment_id, b.garment_id, c.garment_id, d.garment_id ORDER BY (a.score + b.score + c.score + d.score) DESC LIMIT 10 + // Hash: 11856262817829095719 + // TODO: automate + return functions; } } diff --git a/tests/integration/hardcoded_query/11856262817829095719.cpp b/tests/integration/hardcoded_query/11856262817829095719.cpp new file mode 100644 index 000000000..9c9ec16ed --- /dev/null +++ b/tests/integration/hardcoded_query/11856262817829095719.cpp @@ -0,0 +1,188 @@ +#include +#include +#include +#include + +#include "query/i_plan_cpu.hpp" +#include "query/util.hpp" +#include "storage/edge_x_vertex.hpp" +#include "storage/model/properties/all.hpp" +#include "storage/vertex_accessor.hpp" +#include "using.hpp" +#include "utils/memory/stack_allocator.hpp" + +using std::cout; +using std::endl; + +// Dressipi astar query of 4 clicks. 
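+//
+// Best-first search over default_outfit edges: each expanded Node keeps a
+// parent pointer, its depth and a cumulative cost (1 - score); a path is
+// collected once it reaches max_depth and the search stops after `limit`
+// results.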
+ +// TODO: figure out from the pattern in a query +constexpr size_t max_depth = 3; + +// TODO: from query LIMIT 10 +constexpr size_t limit = 10; + +class Node +{ +public: + Node *parent = {nullptr}; + VertexPropertyType tkey; + double cost; + int depth = {0}; + double sum = {0.0}; + VertexAccessor vacc; + + Node(VertexAccessor vacc, double cost, + VertexPropertyType const &tkey) + : cost(cost), vacc(vacc), tkey(tkey) + { + } + Node(VertexAccessor vacc, double cost, Node *parent, + VertexPropertyType const &tkey) + : cost(cost), vacc(vacc), parent(parent), depth(parent->depth + 1), + tkey(tkey) + { + } + + double sum_vertex_score() + { + auto now = this; + double sum = 0; + do + { + sum += (now->vacc.at(tkey).get())->value(); + now = now->parent; + } while (now != nullptr); + this->sum = sum; + return sum; + } +}; + +bool vertex_filter_contained(DbAccessor &t, VertexAccessor &v, Node *before) +{ + if (v.fill()) + { + bool found; + do + { + found = false; + before = before->parent; + if (before == nullptr) + { + return true; + } + } while (v.in_contains(before->vacc)); + } + return false; +} + +template +auto astar(VertexAccessor &va, DbAccessor &t, plan_args_t &, Stream &) +{ + StackAllocator stack; + std::vector results; + + // TODO: variable part (extract) + VertexPropertyType tkey = t.vertex_property_key("score"); + + auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; }; + std::priority_queue, decltype(cmp)> queue(cmp); + + Node *start = new (stack.allocate()) Node(va, 0, tkey); + queue.push(start); + + size_t count = 0; + do + { + auto now = queue.top(); + queue.pop(); + + if (now->depth >= max_depth) + { + results.emplace_back(now); + + count++; + + if (count >= limit) + { + // the limit was reached -> STOP the execution + break; + } + + // if the limit wasn't reached -> POP the next vertex + continue; + } + + iter::for_all(now->vacc.out(), [&](auto edge) { + VertexAccessor va = edge.to(); + if (vertex_filter_contained(t, va, now)) + { + auto cost = 1 - va.at(tkey).get()->value(); + Node *n = new (stack.allocate()) + Node(va, now->cost + cost, now, tkey); + queue.push(n); + } + }); + } while (!queue.empty()); + + stack.free(); + + return results; +} + +class PlanCPU : public IPlanCPU +{ +public: + bool run(Db &db, plan_args_t &args, Stream &stream) override + { + DbAccessor t(db); + + indices_t indices = {{"garment_id", 0}}; + auto properties = query_properties(indices, args); + + auto &label = t.label_find_or_create("garment"); + auto garment_id_prop_key = + t.vertex_property_key("garment_id", args[0].key.flags()); + + stream.write_fields( + {{"a.garment_id", "b.garment_id", "c.garment_id", "d.garment_id"}}); + + label.index() + .for_range(t) + .properties_filter(t, properties) + .for_all([&](auto va) { + auto results = astar(va, t, args, stream); + for (auto node : results) + { + node->sum_vertex_score(); + } + std::sort(results.begin(), results.end(), + [](Node *a, Node *b) { return a->sum < b->sum; }); + for (auto node : results) + { + stream.write_record(); + stream.write_list_header(max_depth + 1); + auto current_node = node; + do + { + // TODO: get property but reverser order + stream.write(current_node->vacc.at(garment_id_prop_key) + .template as()); + current_node = current_node->parent; + } while (current_node != nullptr); + } + + }); + + stream.write_empty_fields(); + stream.write_meta("r"); + + return t.commit(); + } + + ~PlanCPU() {} +}; + +extern "C" IPlanCPU *produce() { return new PlanCPU(); } + +extern "C" void destruct(IPlanCPU *p) { delete p; 
} diff --git a/tests/integration/queries.cpp b/tests/integration/queries.cpp index 9b611c798..3b30a7515 100644 --- a/tests/integration/queries.cpp +++ b/tests/integration/queries.cpp @@ -8,6 +8,7 @@ #include "utils/string/file.hpp" #include "utils/variadic/variadic.hpp" #include "utils/command_line/arguments.hpp" +#include "stream/print_record_stream.hpp" Logger logger; @@ -15,10 +16,14 @@ int main(int argc, char *argv[]) { auto arguments = all_arguments(argc, argv); + PrintRecordStream stream(std::cout); + // POSSIBILITIES: basic, dressipi auto suite_name = get_argument(arguments, "-s", "basic"); // POSSIBILITIES: query_execution, hash_generation auto work_mode = get_argument(arguments, "-w", "query_execution"); + // POSSIBILITIES: mg_basic.txt, dressipi_basic.txt, dressipi_graph.txt + auto query_set_filename = get_argument(arguments, "-q", "mg_basic.txt"); // init logging logging::init_sync(); @@ -39,7 +44,7 @@ int main(int argc, char *argv[]) auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL); // load quries - std::string file_path = "data/queries/core/" + suite_name + ".txt"; + std::string file_path = "data/queries/core/" + query_set_filename; auto queries = utils::read_lines(file_path.c_str()); // execute all queries diff --git a/tests/integration/stream/print_record_stream.hpp b/tests/integration/stream/print_record_stream.hpp new file mode 100644 index 000000000..23cf772bd --- /dev/null +++ b/tests/integration/stream/print_record_stream.hpp @@ -0,0 +1,140 @@ +#pragma once + +#include +#include +#include + +#include "utils/exceptions/not_yet_implemented.hpp" + +class PrintRecordStream +{ +private: + std::ostream& stream; + +public: + PrintRecordStream(std::ostream &stream) : stream(stream) {} + + void write_success() + { + stream << "SUCCESS\n"; + } + + void write_success_empty() + { + stream << "SUCCESS EMPTY\n"; + } + + void write_ignored() + { + stream << "IGNORED\n"; + } + + void write_empty_fields() + { + stream << "EMPTY FIELDS\n"; + } + + void write_fields(const std::vector &fields) + { + stream << "FIELDS:"; + for (auto &field : fields) + { + stream << " " << field; + } + stream << '\n'; + } + + void write_field(const std::string &field) + { + stream << "Field: " << field << '\n'; + } + + void write_list_header(size_t size) + { + stream << "List: " << size << '\n'; + } + + void write_record() + { + stream << "Record\n"; + } + + void write_meta(const std::string &type) + { + stream << "Meta: " << type; + } + + void write_failure(const std::map &data) + { + throw NotYetImplemented(); + } + + void write_count(const size_t count) + { + throw NotYetImplemented(); + } + + void write(const VertexAccessor &vertex) + { + throw NotYetImplemented(); + } + + void write_vertex_record(const VertexAccessor& va) + { + throw NotYetImplemented(); + } + + void write(const EdgeAccessor &edge) + { + throw NotYetImplemented(); + } + + void write_edge_record(const EdgeAccessor& ea) + { + throw NotYetImplemented(); + } + + void write(const StoredProperty &prop) + { + // prop.accept(serializer); + throw NotYetImplemented(); + } + + void write(const StoredProperty &prop) + { + // prop.accept(serializer); + throw NotYetImplemented(); + } + + void write(const Null &prop) + { + throw NotYetImplemented(); + } + + void write(const Bool &prop) + { + throw NotYetImplemented(); + } + + void write(const Float &prop) { throw NotYetImplemented(); } + void write(const Int32 &prop) { throw NotYetImplemented(); } + void write(const Int64 &prop) { throw NotYetImplemented(); } + void write(const 
Double &prop) { throw NotYetImplemented(); } + void write(const String &prop) { throw NotYetImplemented(); } + void write(const ArrayBool &prop) { throw NotYetImplemented(); } + void write(const ArrayInt32 &prop) { throw NotYetImplemented(); } + void write(const ArrayInt64 &prop) { throw NotYetImplemented(); } + void write(const ArrayFloat &prop) { throw NotYetImplemented(); } + void write(const ArrayDouble &prop) { throw NotYetImplemented(); } + void write(const ArrayString &prop) { throw NotYetImplemented(); } + + void send() + { + throw NotYetImplemented(); + } + + void chunk() + { + throw NotYetImplemented(); + } +}; From 1613d813c54fc20fb11f215b483d59e7c7738d92 Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Wed, 14 Dec 2016 12:32:56 +0100 Subject: [PATCH 04/26] The integration tests are fixed. Fixes T161 --- tests/CMakeLists.txt | 44 ++++++------ tests/data/queries/core/basic.txt | 29 ++++---- tests/integration/_hardcoded_query/basic.hpp | 74 ++++++++++++-------- 3 files changed, 83 insertions(+), 64 deletions(-) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 238dfd423..4f6669334 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -58,6 +58,8 @@ endforeach() ## INTEGRATION TESTS +# TODO: move to tests/integration folder + # test hard coded queries add_executable(integration_queries integration/queries.cpp) target_link_libraries(integration_queries stdc++fs) @@ -69,31 +71,31 @@ add_test(NAME integration_queries COMMAND integration_queries) set_property(TARGET integration_queries PROPERTY CXX_STANDARD 14) # test cleaning methods -add_executable(cleaning integration/cleaning.cpp) -target_link_libraries(cleaning memgraph) -target_link_libraries(cleaning Threads::Threads) -target_link_libraries(cleaning ${fmt_static_lib}) -target_link_libraries(cleaning ${yaml_static_lib}) -add_test(NAME cleaning COMMAND cleaning) -set_property(TARGET cleaning PROPERTY CXX_STANDARD 14) +add_executable(integration_cleaning integration/cleaning.cpp) +target_link_libraries(integration_cleaning memgraph) +target_link_libraries(integration_cleaning Threads::Threads) +target_link_libraries(integration_cleaning ${fmt_static_lib}) +target_link_libraries(integration_cleaning ${yaml_static_lib}) +add_test(NAME integration_cleaning COMMAND integration_cleaning) +set_property(TARGET integration_cleaning PROPERTY CXX_STANDARD 14) # test snapshot validity -add_executable(snapshot integration/snapshot.cpp) -target_link_libraries(snapshot memgraph) -target_link_libraries(snapshot Threads::Threads) -target_link_libraries(snapshot ${fmt_static_lib}) -target_link_libraries(snapshot ${yaml_static_lib}) -add_test(NAME snapshot COMMAND snapshot) -set_property(TARGET snapshot PROPERTY CXX_STANDARD 14) +add_executable(integration_snapshot integration/snapshot.cpp) +target_link_libraries(integration_snapshot memgraph) +target_link_libraries(integration_snapshot Threads::Threads) +target_link_libraries(integration_snapshot ${fmt_static_lib}) +target_link_libraries(integration_snapshot ${yaml_static_lib}) +add_test(NAME integration_snapshot COMMAND integration_snapshot) +set_property(TARGET integration_snapshot PROPERTY CXX_STANDARD 14) # test index validity -add_executable(index integration/index.cpp) -target_link_libraries(index memgraph) -target_link_libraries(index Threads::Threads) -target_link_libraries(index ${fmt_static_lib}) -target_link_libraries(index ${yaml_static_lib}) -add_test(NAME index COMMAND index) -set_property(TARGET index PROPERTY CXX_STANDARD 14) 
+add_executable(integration_index integration/index.cpp) +target_link_libraries(integration_index memgraph) +target_link_libraries(integration_index Threads::Threads) +target_link_libraries(integration_index ${fmt_static_lib}) +target_link_libraries(integration_index ${yaml_static_lib}) +add_test(NAME integration_index COMMAND integration_index) +set_property(TARGET integration_index PROPERTY CXX_STANDARD 14) ## MANUAL TESTS diff --git a/tests/data/queries/core/basic.txt b/tests/data/queries/core/basic.txt index 74add328a..6790318b4 100644 --- a/tests/data/queries/core/basic.txt +++ b/tests/data/queries/core/basic.txt @@ -2,26 +2,29 @@ CREATE (n:LABEL {name: "TEST01"}) RETURN n CREATE (n:LABEL {name: "TEST02"}) RETURN n CREATE (n:LABEL {name: "TEST2"}) RETURN n CREATE (n:LABEL {name: "TEST3"}) RETURN n -CREATE (n:OTHER {name: "TEST4"}) RETURN n CREATE (n:ACCOUNT {id: 2322, name: "TEST", country: "Croatia", "created_at": 2352352}) RETURN n -MATCH (n {id: 0}) RETURN n", "MATCH (n {id: 1}) RETURN n -MATCH (n {id: 2}) RETURN n", "MATCH (n {id: 3}) RETURN n +MATCH (n {id: 0}) RETURN n +MATCH (n {id: 1}) RETURN n +MATCH (n {id: 2}) RETURN n +MATCH (n {id: 3}) RETURN n MATCH (a {id:0}), (p {id: 1}) CREATE (a)-[r:IS]->(p) RETURN r MATCH (a {id:1}), (p {id: 2}) CREATE (a)-[r:IS]->(p) RETURN r MATCH ()-[r]-() WHERE ID(r)=0 RETURN r MATCH ()-[r]-() WHERE ID(r)=1 RETURN r -MATCH (n: {id: 0}) SET n.name = "TEST100" RETURN n -MATCH (n: {id: 1}) SET n.name = "TEST101" RETURN n -MATCH (n: {id: 0}) SET n.name = "TEST102" RETURN n -MATCH (n:LABEL) RETURN n" +MATCH (n {id: 0}) SET n.name = "TEST100" RETURN n +MATCH (n {id: 1}) SET n.name = "TEST101" RETURN n +MATCH (n {id: 0}) SET n.name = "TEST102" RETURN n +MATCH (n:LABEL) RETURN n MATCH (n1), (n2) WHERE ID(n1)=0 AND ID(n2)=1 CREATE (n1)<-[r:IS {age: 25,weight: 70}]-(n2) RETURN r -MATCH (n) RETURN n", "MATCH (n:LABEL) RETURN n", "MATCH (n) DELETE n -MATCH (n:LABEL) DELETE n", "MATCH (n) WHERE ID(n) = 0 DELETE n -MATCH ()-[r]-() WHERE ID(r) = 0 DELETE r", "MATCH ()-[r]-() DELETE r +MATCH (n) RETURN n +MATCH (n:LABEL) RETURN n +MATCH (n) DELETE n +MATCH (n:LABEL) DELETE n +MATCH (n) WHERE ID(n) = 0 DELETE n +MATCH ()-[r]-() WHERE ID(r) = 0 DELETE r +MATCH ()-[r]-() DELETE r MATCH ()-[r:TYPE]-() DELETE r MATCH (n)-[:TYPE]->(m) WHERE ID(n) = 0 RETURN m MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN m MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN n,m -MATCH (n:LABEL)-[:TYPE]->(m) RETURN n" -CREATE (n:LABEL1:LABEL2 {name: "TEST01", age: 20}) RETURN n -MATCH (n:LABEL1:LABEL2 {name: "TEST01", age: 20}) RETURN n +MATCH (n:LABEL)-[:TYPE]->(m) RETURN n diff --git a/tests/integration/_hardcoded_query/basic.hpp b/tests/integration/_hardcoded_query/basic.hpp index e9c71f022..6bd89230f 100644 --- a/tests/integration/_hardcoded_query/basic.hpp +++ b/tests/integration/_hardcoded_query/basic.hpp @@ -17,7 +17,7 @@ auto load_basic_functions(Db &db) vertex_accessor.set(property_key, std::move(args[0])); return t.commit(); }; - functions[11597417457737499503u] = create_node; + functions[3191791685918807343u] = create_node; // CREATE (n:LABEL {name: "TEST"}) RETURN n; auto create_labeled_and_named_node = [&db](properties_t &&args) { @@ -29,6 +29,19 @@ auto load_basic_functions(Db &db) vertex_accessor.add_label(label); return t.commit(); }; + functions[8273374963505210457u] = create_labeled_and_named_node; + + // CREATE (n:OTHER {name: "cleaner_test"}) RETURN n + auto create_node_with_other_label = [&db](properties_t &&args) { + DbAccessor t(db); + auto property_key 
= t.vertex_property_key("name", args[0].key.flags()); + auto &label = t.label_find_or_create("OTHER"); + auto vertex_accessor = t.vertex_insert(); + vertex_accessor.set(property_key, std::move(args[0])); + vertex_accessor.add_label(label); + return t.commit(); + }; + functions[6237439055665132277u] = create_node_with_other_label; // CREATE (n:OTHER {name: "TEST"}) RETURN n; auto create_labeled_and_named_node_v2 = [&db](properties_t &&args) { @@ -40,7 +53,9 @@ auto load_basic_functions(Db &db) vertex_accessor.add_label(label); return t.commit(); }; + functions[832997784138269151u] = create_labeled_and_named_node_v2; + // CREATE (n:ACCOUNT {id: 2322, name: "TEST", country: "Croatia", "created_at": 2352352}) RETURN n auto create_account = [&db](properties_t &&args) { DbAccessor t(db); auto prop_id = t.vertex_property_key("id", args[0].key.flags()); @@ -58,7 +73,12 @@ auto load_basic_functions(Db &db) vertex_accessor.add_label(label); return t.commit(); }; + functions[16701745788564313211u] = create_account; + // TODO: inconsistency but it doesn't affect the integration tests + // this is not a unique case + // MATCH (n) WHERE ID(n) = 1 RETURN n + // MATCH (n {id: 0}) RETURN n auto find_node_by_internal_id = [&db](properties_t &&args) { DbAccessor t(db); auto maybe_va = t.vertex_find(Id(args[0].as().value())); @@ -75,7 +95,10 @@ auto load_basic_functions(Db &db) } return t.commit(); }; + functions[1444315501940151196u] = find_node_by_internal_id; + functions[11624983287202420303u] = find_node_by_internal_id; + // MATCH (a {id:0}), (p {id: 1}) CREATE (a)-[r:IS]->(p) RETURN r auto create_edge = [&db](properties_t &&args) { DbAccessor t(db); auto &edge_type = t.type_find_or_create("IS"); @@ -98,7 +121,9 @@ auto load_basic_functions(Db &db) return ret; }; + functions[6972641167053231355u] = create_edge; + // MATCH ()-[r]-() WHERE ID(r)=0 RETURN r auto find_edge_by_internal_id = [&db](properties_t &&args) { DbAccessor t(db); auto maybe_ea = t.edge_find(args[0].as().value()); @@ -122,7 +147,9 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[15080095524051312786u] = find_edge_by_internal_id; + // MATCH (n {id: 0}) SET n.name = "TEST102" RETURN n auto update_node = [&db](properties_t &&args) { DbAccessor t(db); auto prop_name = t.vertex_property_key("name", args[1].key.flags()); @@ -136,6 +163,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[2835161674800069655u] = update_node; // MATCH (n1), (n2) WHERE ID(n1)=0 AND ID(n2)=1 CREATE (n1)<-[r:IS {age: 25, // weight: 70}]-(n2) RETURN r @@ -157,6 +185,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[10360716473890539004u] = create_edge_v2; // MATCH (n) RETURN n auto match_all_nodes = [&db](properties_t &&args) { @@ -167,6 +196,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[5949923385370229113u] = match_all_nodes; // MATCH (n:LABEL) RETURN n auto match_by_label = [&db](properties_t &&args) { @@ -181,6 +211,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[16533049303627288013u] = match_by_label; // MATCH (n) DELETE n auto match_all_delete = [&db](properties_t &&args) { @@ -196,6 +227,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[16628411757092333638u] = match_all_delete; // MATCH (n:LABEL) DELETE n auto match_label_delete = [&db](properties_t &&args) { @@ -208,6 +240,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[10022871879682099034u] = match_label_delete; // MATCH (n) WHERE ID(n) = id 
DELETE n auto match_id_delete = [&db](properties_t &&args) { @@ -221,6 +254,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[5375628876334795080u] = match_id_delete; // MATCH ()-[r]-() WHERE ID(r) = id DELETE r auto match_edge_id_delete = [&db](properties_t &&args) { @@ -234,15 +268,17 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[11747491556476630933u] = match_edge_id_delete; // MATCH ()-[r]-() DELETE r - auto match_edge_all_delete = [&db](properties_t &&args) { + auto match_edge_all_delete = [&db](properties_t &&) { DbAccessor t(db); t.edge_access().fill().for_all([&](auto a) { a.remove(); }); return t.commit(); }; + functions[10064744449500095415u] = match_edge_all_delete; // MATCH ()-[r:TYPE]-() DELETE r auto match_edge_type_delete = [&db](properties_t &&args) { @@ -254,6 +290,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[6084209470626828855u] = match_edge_type_delete; // MATCH (n)-[:TYPE]->(m) WHERE ID(n) = id RETURN m auto match_id_type_return = [&db](properties_t &&args) { @@ -275,6 +312,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[2605621337795673948u] = match_id_type_return; // MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN m auto match_name_type_return = [&db](properties_t &&args) { @@ -313,6 +351,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[17303982256920342123u] = match_name_type_return; // MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN n,m auto match_name_type_return_cross = [&db](properties_t &&args) { @@ -393,6 +432,7 @@ auto load_basic_functions(Db &db) return t.commit(); }; + functions[17456874322957005665u] = match_name_type_return_cross; // MATCH (n:LABEL)-[:TYPE]->(m) RETURN n auto match_label_type_return = [&db](properties_t &&args) { @@ -433,8 +473,8 @@ auto load_basic_functions(Db &db) t.abort(); return false; } - }; + functions[4866842751631597263u] = match_label_type_return; // MATCH (n:LABEL {name: "TEST01"}) RETURN n; auto match_label_property = [&db](properties_t &&args) { @@ -454,33 +494,7 @@ auto load_basic_functions(Db &db) return false; } }; - functions[17721584194272598838u] = match_label_property; - - functions[15284086425088081497u] = match_all_nodes; - functions[4857652843629217005u] = match_by_label; - functions[15648836733456301916u] = create_edge_v2; - functions[10597108978382323595u] = create_account; - functions[5397556489557792025u] = create_labeled_and_named_node; - - // TODO: query hasher reports two hash values - functions[998725786176032607u] = create_labeled_and_named_node_v2; - functions[16090682663946456821u] = create_labeled_and_named_node_v2; - - functions[7939106225150551899u] = create_edge; - functions[6579425155585886196u] = create_edge; - functions[11198568396549106428u] = find_node_by_internal_id; - functions[8320600413058284114u] = find_edge_by_internal_id; - functions[6813335159006269041u] = update_node; - functions[10506105811763742758u] = match_all_delete; - functions[13742779491897528506u] = match_label_delete; - functions[11349462498691305864u] = match_id_delete; - functions[6963549500479100885u] = match_edge_id_delete; - functions[14897166600223619735u] = match_edge_all_delete; - functions[16888549834923624215u] = match_edge_type_delete; - functions[11675960684124428508u] = match_id_type_return; - functions[15698881472054193835u] = match_name_type_return; - functions[12595102442911913761u] = match_name_type_return_cross; - functions[8918221081398321263u] = match_label_type_return; + 
functions[7710665404758409302u] = match_label_property; return functions; } From 9c3a79bb188f7bc7ab372bd3af83ce1593e418fa Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Thu, 15 Dec 2016 11:52:28 +0100 Subject: [PATCH 05/26] Dressipi release no 3 Fixes T188 --- .../hardcoded_query/11856262817829095719.cpp | 25 ++++++++----------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/tests/integration/hardcoded_query/11856262817829095719.cpp b/tests/integration/hardcoded_query/11856262817829095719.cpp index 9c9ec16ed..30c2b7396 100644 --- a/tests/integration/hardcoded_query/11856262817829095719.cpp +++ b/tests/integration/hardcoded_query/11856262817829095719.cpp @@ -99,6 +99,7 @@ auto astar(VertexAccessor &va, DbAccessor &t, plan_args_t &, Stream &) if (now->depth >= max_depth) { + now->sum_vertex_score(); results.emplace_back(now); count++; @@ -130,6 +131,14 @@ auto astar(VertexAccessor &va, DbAccessor &t, plan_args_t &, Stream &) return results; } +void reverse_stream_ids(Node *node, Stream& stream, VertexPropertyKey key) +{ + if (node == nullptr) + return; + reverse_stream_ids(node->parent, stream, key); + stream.write(node->vacc.at(key).template as()); +} + class PlanCPU : public IPlanCPU { public: @@ -152,26 +161,14 @@ public: .properties_filter(t, properties) .for_all([&](auto va) { auto results = astar(va, t, args, stream); - for (auto node : results) - { - node->sum_vertex_score(); - } std::sort(results.begin(), results.end(), - [](Node *a, Node *b) { return a->sum < b->sum; }); + [](Node *a, Node *b) { return a->sum > b->sum; }); for (auto node : results) { stream.write_record(); stream.write_list_header(max_depth + 1); - auto current_node = node; - do - { - // TODO: get property but reverser order - stream.write(current_node->vacc.at(garment_id_prop_key) - .template as()); - current_node = current_node->parent; - } while (current_node != nullptr); + reverse_stream_ids(node, stream, garment_id_prop_key); } - }); stream.write_empty_fields(); From 9154f9b7192551a0ad263b3a69a08cf0627529e6 Mon Sep 17 00:00:00 2001 From: sale Date: Fri, 16 Dec 2016 12:56:36 +0000 Subject: [PATCH 06/26] Added signal handler and refactored exception handler Summary: Added signal handler and refactored exception handler Test Plan: manual Reviewers: buda Subscribers: buda Differential Revision: https://memgraph.phacility.com/D17 --- .arclint | 8 -- include/utils/signals/handler.hpp | 41 +++++++++ include/utils/terminate_handler.hpp | 53 +++++------- src/memgraph_bolt.cpp | 130 +++++++++++++------------- tests/unit/signal_handler.cpp | 27 ++++++ 5 files changed, 158 insertions(+), 101 deletions(-) delete mode 100644 .arclint create mode 100644 include/utils/signals/handler.hpp create mode 100644 tests/unit/signal_handler.cpp diff --git a/.arclint b/.arclint deleted file mode 100644 index 6c5a03cd2..000000000 --- a/.arclint +++ /dev/null @@ -1,8 +0,0 @@ -{ - "linters": { - "cppcheck": { - "type": "cppcheck", - "include": ["(\\.cpp$)", "(\\.hpp$)"] - } - } -} diff --git a/include/utils/signals/handler.hpp b/include/utils/signals/handler.hpp new file mode 100644 index 000000000..18d833870 --- /dev/null +++ b/include/utils/signals/handler.hpp @@ -0,0 +1,41 @@ +#include <csignal> +#include <cstdlib> +#include <functional> +#include <iostream> +#include <map> +#include <utility> +#include <vector> + +using Function = std::function<void()>; + +enum class Signal : int { + Terminate = SIGTERM, + SegmentationFault = SIGSEGV, + Interupt = SIGINT, + Quit = SIGQUIT, + Abort = SIGABRT }; + +class SignalHandler { + private: + static std::map<int, std::function<void()>> handlers_; + + static void handle(int signal) { handlers_[signal](); }
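+ + // handle() is the plain function pointer handed to std::signal below; it + // looks up and invokes the std::function registered for that signal. + // Usage sketch (based on register_handler below): + // SignalHandler::register_handler(Signal::Terminate, + // []() { /* log, clean up, exit */ });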
+ + public: + static void register_handler(Signal signal, Function func) { + int signal_number = static_cast<int>(signal); + handlers_[signal_number] = func; + std::signal(signal_number, SignalHandler::handle); + } + + // TODO possible changes if singleton needed later + /* + static SignalHandler& instance() { + static SignalHandler instance; + return instance; + } + */ +}; + +std::map<int, std::function<void()>> SignalHandler::handlers_ = {};
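+ +// NOTE: handlers_ is defined in this header, so the header can be included +// from only one translation unit per binary without violating the +// one-definition rule.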
"SIGINT" : "SIGABRT"; - - logger.info("Recieved signal {}", signal); - logger.info("Shutting down..."); - - std::exit(EXIT_SUCCESS); -} - static constexpr const char* interface = "0.0.0.0"; static constexpr const char* port = "7687"; -int main(void) -{ - // TODO figure out what is the relationship between this and signals - // that are configured below - std::set_terminate(&terminate_handler); +void throw_and_stacktace(std::string message) { + Stacktrace stacktrace; - // logger init -#ifdef SYNC_LOGGER - logging::init_sync(); -#else - logging::init_async(); -#endif - logging::log->pipe(std::make_unique()); + for (int i = 0; i < stacktrace.size(); i++) + message.append(fmt::format("\n at {} ({})", stacktrace[i].function, + stacktrace[i].location)); - // get Main logger - logger = logging::log->logger("Main"); - logger.info("{}", logging::log->type()); - - signal(SIGINT, sigint_handler); - signal(SIGABRT, sigint_handler); - - io::Socket socket; - - try - { - socket = io::Socket::bind(interface, port); - } - catch(io::NetworkError e) - { - logger.error("Cannot bind to socket on {} at {}", interface, port); - logger.error("{}", e.what()); - - std::exit(EXIT_FAILURE); - } - - socket.set_non_blocking(); - socket.listen(1024); - - logger.info("Listening on {} at {}", interface, port); - - bolt::Server server(std::move(socket)); - serverptr = &server; - - // TODO: N should be configurable - auto N = std::thread::hardware_concurrency(); - logger.info("Starting {} workers", N); - server.start(N); - - logger.info("Shutting down..."); - - return EXIT_SUCCESS; + logger.info(message); +} + +int main(void) { + // TODO figure out what is the relationship between this and signals + // that are configured below + std::set_terminate(&terminate_handler); + +// logger init +#ifdef SYNC_LOGGER + logging::init_sync(); +#else + logging::init_async(); +#endif + logging::log->pipe(std::make_unique()); + + // get Main logger + logger = logging::log->logger("Main"); + logger.info("{}", logging::log->type()); + + SignalHandler::register_handler(Signal::SegmentationFault, []() { + throw_and_stacktace("SegmentationFault signal raised"); + exit(1); + }); + + SignalHandler::register_handler(Signal::Terminate, []() { + throw_and_stacktace("Terminate signal raised"); + exit(1); + }); + + SignalHandler::register_handler(Signal::Abort, []() { + throw_and_stacktace("Abort signal raised"); + exit(1); + }); + + io::Socket socket; + + try { + socket = io::Socket::bind(interface, port); + } catch (io::NetworkError e) { + logger.error("Cannot bind to socket on {} at {}", interface, port); + logger.error("{}", e.what()); + + std::exit(EXIT_FAILURE); + } + + socket.set_non_blocking(); + socket.listen(1024); + + logger.info("Listening on {} at {}", interface, port); + + bolt::Server server(std::move(socket)); + serverptr = &server; + + // TODO: N should be configurable + auto N = std::thread::hardware_concurrency(); + logger.info("Starting {} workers", N); + server.start(N); + + logger.info("Shutting down..."); + + return EXIT_SUCCESS; } diff --git a/tests/unit/signal_handler.cpp b/tests/unit/signal_handler.cpp new file mode 100644 index 000000000..d5e83a8d8 --- /dev/null +++ b/tests/unit/signal_handler.cpp @@ -0,0 +1,27 @@ +#define CATCH_CONFIG_MAIN +#include "catch.hpp" + +#include +#include +#include + +#include "utils/signals/handler.hpp" +#include "utils/stacktrace.hpp" + +TEST_CASE("SignalHandler Segmentation Fault Test") { + SignalHandler::register_handler(Signal::SegmentationFault, []() { + std::cout << "Segmentation Fault" << 
std::endl; + Stacktrace stacktrace; + + int size = 10; + std::string message; + for (int i = 0; i < size; i++) { + message.append(fmt::format("\n at {} ({})", stacktrace[i].function, + stacktrace[i].location)); + } + std::cout << message << std::endl; + + }); + + std::raise(SIGSEGV); +} From d158333335853af75eb00e09ee70c5723623cb77 Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Fri, 16 Dec 2016 14:05:04 +0100 Subject: [PATCH 07/26] Added memgraph prefix. Fixes T160 Summary: Added memgraph prefix. Fixes T160 Test Plan: manual Reviewers: sale Subscribers: sale, buda Maniphest Tasks: T160 Differential Revision: https://memgraph.phacility.com/D18 --- CMakeLists.txt | 47 ++++++++-- tests/CMakeLists.txt | 123 +------------------------ tests/benchmark/CMakeLists.txt | 48 +++++++--- tests/benchmark/example.cpp | 34 ------- tests/benchmark/gbenchmark_example.cpp | 34 ------- tests/concurrent/CMakeLists.txt | 41 +++++++++ tests/integration/CMakeLists.txt | 43 +++++++++ tests/manual/CMakeLists.txt | 47 ++++++++++ tests/try/glibcpp_problem/Makefile | 48 ---------- tests/try/glibcpp_problem/main.cpp | 20 ---- tests/try/iterator/main.cpp | 15 --- tests/unit/CMakeLists.txt | 47 ++++++++++ 12 files changed, 256 insertions(+), 291 deletions(-) delete mode 100644 tests/benchmark/example.cpp delete mode 100644 tests/benchmark/gbenchmark_example.cpp create mode 100644 tests/concurrent/CMakeLists.txt create mode 100644 tests/integration/CMakeLists.txt create mode 100644 tests/manual/CMakeLists.txt delete mode 100644 tests/try/glibcpp_problem/Makefile delete mode 100644 tests/try/glibcpp_problem/main.cpp delete mode 100644 tests/try/iterator/main.cpp create mode 100644 tests/unit/CMakeLists.txt diff --git a/CMakeLists.txt b/CMakeLists.txt index 295694a8c..08bb29916 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,11 +1,11 @@ cmake_minimum_required(VERSION 3.1) # get directory name -get_filename_component(ProjectId ${CMAKE_SOURCE_DIR} NAME) +get_filename_component(project_name ${CMAKE_SOURCE_DIR} NAME) # replace whitespaces with underscores -string(REPLACE " " "_" ProjectId ${ProjectId}) +string(REPLACE " " "_" project_name ${project_name}) # set project name -project(${ProjectId}) +project(${project_name}) # setup CMake module path, defines path for include() and find_package() # https://cmake.org/cmake/help/latest/variable/CMAKE_MODULE_PATH.html @@ -16,6 +16,7 @@ find_package(Threads REQUIRED) # flags # c++14 +set(cxx_standard 14) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++1y") # functions @@ -252,10 +253,20 @@ option(POC "Build proof of concept binaries" ON) message(STATUS "POC binaries: ${POC}") option(TOOLS "Build tool executables" ON) message(STATUS "TOOLS binaries: ${TOOLS}") + option(TESTS "Build test binaries" ON) message(STATUS "TESTS binaries: ${TESTS}") -option(BENCHMARK "Build benchmark binaries" ON) -message(STATUS "BENCHMARK binaries: ${BENCHMARK}") + +option(BENCHMARK "Build benchmark test binaries" ON) +message(STATUS "BENCHMARK test binaries: ${BENCHMARK}") +option(CONCURRENT "Build concurrent test binaries" ON) +message(STATUS "CONCURRENT test binaries: ${CONCURRENT}") +option(INTEGRATION "Build integration test binaries" ON) +message(STATUS "INTEGRATION test binaries: ${INTEGRATION}") +option(MANUAL "Build manual test binaries" ON) +message(STATUS "MANUAL test binaries: ${MANUAL}") +option(UNIT "Build unit test binaries" ON) +message(STATUS "UNIT test binaries: ${UNIT}") # -- binaries ----------------------------------------------------------------- # -- configure defines 
-------------------------------------------------------- @@ -361,6 +372,10 @@ add_library(memgraph STATIC ${memgraph_src_files}) add_library(memgraph_pic STATIC ${memgraph_src_files}) set_property(TARGET memgraph_pic PROPERTY POSITION_INDEPENDENT_CODE TRUE) +# TODO: test build & run logic T190 + +include_directories(${catch_source_dir}/include) + # tests if (TESTS) enable_testing() @@ -372,11 +387,31 @@ if (POC) add_subdirectory(poc) endif() -# benchmark binaries +# benchmark test binaries if (BENCHMARK) add_subdirectory(${PROJECT_SOURCE_DIR}/tests/benchmark) endif() +# concurrent test binaries +if (CONCURRENT) + add_subdirectory(${PROJECT_SOURCE_DIR}/tests/concurrent) +endif() + +# integration test binaries +if (INTEGRATION) + add_subdirectory(${PROJECT_SOURCE_DIR}/tests/integration) +endif() + +# manual test binaries +if (MANUAL) + add_subdirectory(${PROJECT_SOURCE_DIR}/tests/manual) +endif() + +# unit test binaries +if (UNIT) + add_subdirectory(${PROJECT_SOURCE_DIR}/tests/unit) +endif() + # memgraph build name execute_process( OUTPUT_VARIABLE COMMIT_BRANCH diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 4f6669334..0daa427fa 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -1,130 +1,11 @@ cmake_minimum_required(VERSION 3.1) -project(memgraph_tests) +project(${project_name}_tests) set(src_dir ${CMAKE_SOURCE_DIR}/src) -include_directories(${catch_source_dir}/include) - -# TODO: modular approach (REFACTOR) - -## UNIT TESTS - -# find unit tests -file(GLOB_RECURSE unit_test_files ${CMAKE_HOME_DIRECTORY}/tests/unit/*.cpp) -get_file_names("${unit_test_files}" file_names) -set(unit_test_names "${file_names}") -message(STATUS "Available unit tests are: ${unit_test_names}") - # copy unit test data file(COPY ${CMAKE_SOURCE_DIR}/tests/data DESTINATION ${CMAKE_BINARY_DIR}/tests) - -# build unit tests -foreach(test ${unit_test_names}) - set(test_name unit_${test}) - add_executable(${test_name} unit/${test}.cpp ${src_dir}/template_engine/engine.cpp) - target_link_libraries(${test_name} memgraph) - # TODO: separate dependencies - target_link_libraries(${test_name} stdc++fs) - target_link_libraries(${test_name} cypher_lib) - target_link_libraries(${test_name} Threads::Threads) - target_link_libraries(${test_name} ${fmt_static_lib}) - target_link_libraries(${test_name} ${yaml_static_lib}) - add_test(NAME ${test_name} COMMAND ${test_name}) - set_property(TARGET ${test_name} PROPERTY CXX_STANDARD 14) -endforeach() - -## CONCURRENCY TESTS - -# find concurrency tests -file(GLOB_RECURSE concurrency_test_files - ${CMAKE_HOME_DIRECTORY}/tests/concurrent/*.cpp) -get_file_names("${concurrency_test_files}" file_names) -set(concurrency_test_names "${file_names}") -message(STATUS "Available concurrency tests are: ${concurrency_test_names}") - -# build concurrency tests -foreach(test ${concurrency_test_names}) - set(test_name concurrent_${test}) - add_executable(${test_name} concurrent/${test}.cpp) - target_link_libraries(${test_name} memgraph) - target_link_libraries(${test_name} Threads::Threads) - target_link_libraries(${test_name} ${fmt_static_lib}) - target_link_libraries(${test_name} ${yaml_static_lib}) - add_test(NAME ${test_name} COMMAND ${test_name}) - set_property(TARGET ${test_name} PROPERTY CXX_STANDARD 14) -endforeach() - -## INTEGRATION TESTS - -# TODO: move to tests/integration folder - -# test hard coded queries -add_executable(integration_queries integration/queries.cpp) -target_link_libraries(integration_queries stdc++fs)
-target_link_libraries(integration_queries memgraph) -target_link_libraries(integration_queries Threads::Threads) -target_link_libraries(integration_queries ${fmt_static_lib}) -target_link_libraries(integration_queries ${yaml_static_lib}) -add_test(NAME integration_queries COMMAND integration_queries) -set_property(TARGET integration_queries PROPERTY CXX_STANDARD 14) - -# test cleaning methods -add_executable(integration_cleaning integration/cleaning.cpp) -target_link_libraries(integration_cleaning memgraph) -target_link_libraries(integration_cleaning Threads::Threads) -target_link_libraries(integration_cleaning ${fmt_static_lib}) -target_link_libraries(integration_cleaning ${yaml_static_lib}) -add_test(NAME integration_cleaning COMMAND integration_cleaning) -set_property(TARGET integration_cleaning PROPERTY CXX_STANDARD 14) - -# test snapshot validity -add_executable(integration_snapshot integration/snapshot.cpp) -target_link_libraries(integration_snapshot memgraph) -target_link_libraries(integration_snapshot Threads::Threads) -target_link_libraries(integration_snapshot ${fmt_static_lib}) -target_link_libraries(integration_snapshot ${yaml_static_lib}) -add_test(NAME integration_snapshot COMMAND integration_snapshot) -set_property(TARGET integration_snapshot PROPERTY CXX_STANDARD 14) - -# test index validity -add_executable(integration_index integration/index.cpp) -target_link_libraries(integration_index memgraph) -target_link_libraries(integration_index Threads::Threads) -target_link_libraries(integration_index ${fmt_static_lib}) -target_link_libraries(integration_index ${yaml_static_lib}) -add_test(NAME integration_index COMMAND integration_index) -set_property(TARGET integration_index PROPERTY CXX_STANDARD 14) - -## MANUAL TESTS - -# cypher_ast -add_executable(manual_cypher_ast manual/cypher_ast.cpp) -target_link_libraries(manual_cypher_ast stdc++fs) -target_link_libraries(manual_cypher_ast memgraph) -target_link_libraries(manual_cypher_ast Threads::Threads) -target_link_libraries(manual_cypher_ast ${fmt_static_lib}) -target_link_libraries(manual_cypher_ast ${yaml_static_lib}) -target_link_libraries(manual_cypher_ast cypher_lib) -set_property(TARGET manual_cypher_ast PROPERTY CXX_STANDARD 14) - -# query_engine -add_executable(manual_query_engine manual/query_engine.cpp) -target_link_libraries(manual_query_engine stdc++fs) -target_link_libraries(manual_query_engine memgraph) -target_link_libraries(manual_query_engine ${fmt_static_lib}) -target_link_libraries(manual_query_engine ${yaml_static_lib}) -target_link_libraries(manual_query_engine dl) -target_link_libraries(manual_query_engine cypher_lib) -target_link_libraries(manual_query_engine Threads::Threads) -set_property(TARGET manual_query_engine PROPERTY CXX_STANDARD 14) - -# query_hasher -add_executable(manual_query_hasher manual/query_hasher.cpp) -target_link_libraries(manual_query_hasher stdc++fs) -target_link_libraries(manual_query_hasher memgraph) -target_link_libraries(manual_query_hasher ${fmt_static_lib}) -target_link_libraries(manual_query_hasher ${yaml_static_lib}) -target_link_libraries(manual_query_hasher Threads::Threads) -set_property(TARGET manual_query_hasher PROPERTY CXX_STANDARD 14) +# TODO: test logic here T190 diff --git a/tests/benchmark/CMakeLists.txt b/tests/benchmark/CMakeLists.txt index a30271887..6d566fc50 100644 --- a/tests/benchmark/CMakeLists.txt +++ b/tests/benchmark/CMakeLists.txt @@ -1,21 +1,43 @@ find_package(Threads REQUIRED) -file(GLOB_RECURSE ALL_BENCH_CPP *.cpp) +# set current directory name as a 
test type +get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME) -foreach(ONE_BENCH_CPP ${ALL_BENCH_CPP}) +# get all cpp abs file names recursively starting from current directory +file(GLOB_RECURSE test_type_cpps *.cpp) +message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}") - get_filename_component(ONE_BENCH_EXEC ${ONE_BENCH_CPP} NAME_WE) +# for each cpp file build binary and register test +foreach(test_cpp ${test_type_cpps}) - # Avoid name collision - set(TARGET_NAME Bench_${ONE_BENCH_EXEC}) + # get exec name (remove extension from the abs path) + get_filename_component(exec_name ${test_cpp} NAME_WE) - add_executable(${TARGET_NAME} ${ONE_BENCH_CPP}) - set_target_properties(${TARGET_NAME} PROPERTIES OUTPUT_NAME ${ONE_BENCH_EXEC}) - target_link_libraries(${TARGET_NAME} benchmark ${CMAKE_THREAD_LIBS_INIT}) - target_link_libraries(${TARGET_NAME} memgraph) - target_link_libraries(${TARGET_NAME} ${fmt_static_lib}) - target_link_libraries(${TARGET_NAME} Threads::Threads) - target_link_libraries(${TARGET_NAME} ${yaml_static_lib}) - add_test(${TARGET_NAME} ${ONE_BENCH_EXEC}) + # set target name in format {project_name}_{test_type}_{exec_name} + set(target_name ${project_name}_${test_type}_${exec_name}) + + # build exec file + add_executable(${target_name} ${test_cpp}) + set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard}) + + # OUTPUT_NAME sets the real name of a target when it is built and can be + # used to help create two targets of the same name even though CMake + # requires unique logical target names + set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name}) + + # link libraries + # threads (cross-platform) + target_link_libraries(${target_name} Threads::Threads) + # google-benchmark + target_link_libraries(${target_name} benchmark ${CMAKE_THREAD_LIBS_INIT}) + # memgraph lib + target_link_libraries(${target_name} memgraph) + # fmt format lib + target_link_libraries(${target_name} ${fmt_static_lib}) + # yaml parser lib + target_link_libraries(${target_name} ${yaml_static_lib}) + + # register test + add_test(${target_name} ${exec_name}) endforeach() diff --git a/tests/benchmark/example.cpp b/tests/benchmark/example.cpp deleted file mode 100644 index 4d31d8a29..000000000 --- a/tests/benchmark/example.cpp +++ /dev/null @@ -1,34 +0,0 @@ -#include "benchmark/benchmark_api.h" - -#include -#include - -static void BM_VectorInsert(benchmark::State &state) -{ - while (state.KeepRunning()) { - std::vector insertion_test; - for (int i = 0, i_end = state.range_x(); i < i_end; i++) { - insertion_test.push_back(i); - } - } -} - -// Register the function as a benchmark -BENCHMARK(BM_VectorInsert)->Range(8, 8 << 10); - -//~~~~~~~~~~~~~~~~ - -// Define another benchmark -static void BM_SetInsert(benchmark::State &state) -{ - while (state.KeepRunning()) { - std::set insertion_test; - for (int i = 0, i_end = state.range_x(); i < i_end; i++) { - insertion_test.insert(i); - } - } -} - -BENCHMARK(BM_SetInsert)->Range(8, 8 << 10); - -BENCHMARK_MAIN(); diff --git a/tests/benchmark/gbenchmark_example.cpp b/tests/benchmark/gbenchmark_example.cpp deleted file mode 100644 index 4d31d8a29..000000000 --- a/tests/benchmark/gbenchmark_example.cpp +++ /dev/null @@ -1,34 +0,0 @@ -#include "benchmark/benchmark_api.h" - -#include -#include - -static void BM_VectorInsert(benchmark::State &state) -{ - while (state.KeepRunning()) { - std::vector insertion_test; - for (int i = 0, i_end = state.range_x(); i < i_end; i++) { - insertion_test.push_back(i); - } - } 
-} - -// Register the function as a benchmark -BENCHMARK(BM_VectorInsert)->Range(8, 8 << 10); - -//~~~~~~~~~~~~~~~~ - -// Define another benchmark -static void BM_SetInsert(benchmark::State &state) -{ - while (state.KeepRunning()) { - std::set insertion_test; - for (int i = 0, i_end = state.range_x(); i < i_end; i++) { - insertion_test.insert(i); - } - } -} - -BENCHMARK(BM_SetInsert)->Range(8, 8 << 10); - -BENCHMARK_MAIN(); diff --git a/tests/concurrent/CMakeLists.txt b/tests/concurrent/CMakeLists.txt new file mode 100644 index 000000000..7123a7072 --- /dev/null +++ b/tests/concurrent/CMakeLists.txt @@ -0,0 +1,41 @@ +find_package(Threads REQUIRED) + +# set current directory name as a test type +get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME) + +# get all cpp abs file names recursively starting from current directory +file(GLOB_RECURSE test_type_cpps *.cpp) +message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}") + +# for each cpp file build binary and register test +foreach(test_cpp ${test_type_cpps}) + + # get exec name (remove extension from the abs path) + get_filename_component(exec_name ${test_cpp} NAME_WE) + + # set target name in format {project_name}_{test_type}_{exec_name} + set(target_name ${project_name}_${test_type}_${exec_name}) + + # build exec file + add_executable(${target_name} ${test_cpp}) + set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard}) + + # OUTPUT_NAME sets the real name of a target when it is built and can be + # used to help create two targets of the same name even though CMake + # requires unique logical target names + set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name}) + + # link libraries + # threads (cross-platform) + target_link_libraries(${target_name} Threads::Threads) + # memgraph lib + target_link_libraries(${target_name} memgraph) + # fmt format lib + target_link_libraries(${target_name} ${fmt_static_lib}) + # yaml parser lib + target_link_libraries(${target_name} ${yaml_static_lib}) + + # register test + add_test(${target_name} ${exec_name}) + +endforeach() diff --git a/tests/integration/CMakeLists.txt b/tests/integration/CMakeLists.txt new file mode 100644 index 000000000..99432b169 --- /dev/null +++ b/tests/integration/CMakeLists.txt @@ -0,0 +1,43 @@ +find_package(Threads REQUIRED) + +# set current directory name as a test type +get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME) + +# get all cpp abs file names recursively starting from current directory +file(GLOB test_type_cpps *.cpp) +message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}") + +# for each cpp file build binary and register test +foreach(test_cpp ${test_type_cpps}) + + # get exec name (remove extension from the abs path) + get_filename_component(exec_name ${test_cpp} NAME_WE) + + # set target name in format {project_name}_{test_type}_{exec_name} + set(target_name ${project_name}_${test_type}_${exec_name}) + + # build exec file + add_executable(${target_name} ${test_cpp}) + set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard}) + + # OUTPUT_NAME sets the real name of a target when it is built and can be + # used to help create two targets of the same name even though CMake + # requires unique logical target names + set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name}) + + # link libraries + # filesystem + target_link_libraries(${target_name} stdc++fs) + # threads (cross-platform) + target_link_libraries(${target_name} 
Threads::Threads) + # memgraph lib + target_link_libraries(${target_name} memgraph) + # fmt format lib + target_link_libraries(${target_name} ${fmt_static_lib}) + # yaml parser lib + target_link_libraries(${target_name} ${yaml_static_lib}) + + # register test + add_test(${target_name} ${exec_name}) + +endforeach() diff --git a/tests/manual/CMakeLists.txt b/tests/manual/CMakeLists.txt new file mode 100644 index 000000000..7a48747db --- /dev/null +++ b/tests/manual/CMakeLists.txt @@ -0,0 +1,47 @@ +find_package(Threads REQUIRED) + +# set current directory name as a test type +get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME) + +# get all cpp abs file names recursively starting from current directory +file(GLOB_RECURSE test_type_cpps *.cpp) +message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}") + +# for each cpp file build binary and register test +foreach(test_cpp ${test_type_cpps}) + + # get exec name (remove extension from the abs path) + get_filename_component(exec_name ${test_cpp} NAME_WE) + + # set target name in format {project_name}_{test_type}_{exec_name} + set(target_name ${project_name}_${test_type}_${exec_name}) + + # build exec file + add_executable(${target_name} ${test_cpp}) + set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard}) + + # OUTPUT_NAME sets the real name of a target when it is built and can be + # used to help create two targets of the same name even though CMake + # requires unique logical target names + set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name}) + + # link libraries + # filesystem + target_link_libraries(${target_name} stdc++fs) + # threads (cross-platform) + target_link_libraries(${target_name} Threads::Threads) + # memgraph lib + target_link_libraries(${target_name} memgraph) + # fmt format lib + target_link_libraries(${target_name} ${fmt_static_lib}) + # yaml parser lib + target_link_libraries(${target_name} ${yaml_static_lib}) + # cypher lib + target_link_libraries(${target_name} cypher_lib) + # dynamic lib + target_link_libraries(${target_name} dl) + + # register test + add_test(${target_name} ${exec_name}) + +endforeach() diff --git a/tests/try/glibcpp_problem/Makefile b/tests/try/glibcpp_problem/Makefile deleted file mode 100644 index c047682f7..000000000 --- a/tests/try/glibcpp_problem/Makefile +++ /dev/null @@ -1,48 +0,0 @@ -# compiler -CXX=clang++ - -# compile flags -CFLAGS=-std=c++1y -pthread -g2 # -D_GLIBCXX_DEBUG - -# includes and libraries -INCLUDE_PATHS=-I../../../include -I../../../libs/fmt -I../../../src -LIB_PATHS=-L../../../libs/fmt/fmt -LDFLAGS=-lfmt - -# source and executable -LOG_SRC_PATH=../../.. 
-SOURCES=main.cpp async_log.o sync_log.o stderr.o stdout.o default.o levels.o log.o -EXECUTABLE=a.out - -# release target -all: $(EXECUTABLE) - -$(EXECUTABLE): $(SOURCES) - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) $(SOURCES) -o $(EXECUTABLE) $(LIB_PATHS) $(LDFLAGS) - -# TODO: auto -async_log.o: ../../../src/logging/logs/async_log.cpp - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) -c ../../../src/logging/logs/async_log.cpp - -sync_log.o: ../../../src/logging/logs/sync_log.cpp - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) -c ../../../src/logging/logs/sync_log.cpp - -stderr.o: ../../../src/logging/streams/stderr.cpp - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) -c ../../../src/logging/streams/stderr.cpp - -stdout.o: ../../../src/logging/streams/stdout.cpp - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) -c ../../../src/logging/streams/stdout.cpp - -default.o: ../../../src/logging/default.cpp - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) -c ../../../src/logging/default.cpp - -levels.o: ../../../src/logging/levels.cpp - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) -c ../../../src/logging/levels.cpp - -log.o: ../../../src/logging/log.cpp - $(CXX) $(CFLAGS) $(INCLUDE_PATHS) -c ../../../src/logging/log.cpp - -.PHONY: -clean: - rm -f a.out - rm -f *.o diff --git a/tests/try/glibcpp_problem/main.cpp b/tests/try/glibcpp_problem/main.cpp deleted file mode 100644 index 730695f64..000000000 --- a/tests/try/glibcpp_problem/main.cpp +++ /dev/null @@ -1,20 +0,0 @@ -#include - -#include "logging/default.hpp" -#include "logging/streams/stdout.hpp" - -int main(void) -{ - // init logging - logging::init_sync(); - logging::log->pipe(std::make_unique()); - - // get Main logger - Logger logger; - logger = logging::log->logger("Main"); - logger.info("{}", logging::log->type()); - - std::string* test = new std::string("test_value"); - - return 0; -} diff --git a/tests/try/iterator/main.cpp b/tests/try/iterator/main.cpp deleted file mode 100644 index 550e4fa23..000000000 --- a/tests/try/iterator/main.cpp +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include - -#include "utils/iterator/map.hpp" - -int main(void) -{ - std::vector test{1,2,3}; - - for (auto item : test) { - std::cout << item << std::endl; - } - - return 0; -} diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt new file mode 100644 index 000000000..7a48747db --- /dev/null +++ b/tests/unit/CMakeLists.txt @@ -0,0 +1,47 @@ +find_package(Threads REQUIRED) + +# set current directory name as a test type +get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME) + +# get all cpp abs file names recursively starting from current directory +file(GLOB_RECURSE test_type_cpps *.cpp) +message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}") + +# for each cpp file build binary and register test +foreach(test_cpp ${test_type_cpps}) + + # get exec name (remove extension from the abs path) + get_filename_component(exec_name ${test_cpp} NAME_WE) + + # set target name in format {project_name}_{test_type}_{exec_name} + set(target_name ${project_name}_${test_type}_${exec_name}) + + # build exec file + add_executable(${target_name} ${test_cpp}) + set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard}) + + # OUTPUT_NAME sets the real name of a target when it is built and can be + # used to help create two targets of the same name even though CMake + # requires unique logical target names + set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name}) + + # link libraries + # filesystem + target_link_libraries(${target_name} stdc++fs) + # threads (cross-platform) + 
target_link_libraries(${target_name} Threads::Threads) + # memgraph lib + target_link_libraries(${target_name} memgraph) + # fmt format lib + target_link_libraries(${target_name} ${fmt_static_lib}) + # yaml parser lib + target_link_libraries(${target_name} ${yaml_static_lib}) + # cypher lib + target_link_libraries(${target_name} cypher_lib) + # dynamic lib + target_link_libraries(${target_name} dl) + + # register test + add_test(${target_name} ${exec_name}) + +endforeach() From 885429860684516545dbd3adcfa074966b5f51cf Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Fri, 16 Dec 2016 14:32:37 +0100 Subject: [PATCH 08/26] First step towards more test granularity. --- tests/CMakeLists.txt | 5 +---- tests/manual/CMakeLists.txt | 3 --- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 0daa427fa..4b48a66d6 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -2,10 +2,7 @@ cmake_minimum_required(VERSION 3.1) project(${project_name}_tests) -set(src_dir ${CMAKE_SOURCE_DIR}/src) - # copy unit test data -file(COPY ${CMAKE_SOURCE_DIR}/tests/data - DESTINATION ${CMAKE_BINARY_DIR}/tests) +file(COPY ${CMAKE_SOURCE_DIR}/tests/data DESTINATION ${CMAKE_BINARY_DIR}/tests) # TODO: test logic here T190 diff --git a/tests/manual/CMakeLists.txt b/tests/manual/CMakeLists.txt index 7a48747db..6a7cb274b 100644 --- a/tests/manual/CMakeLists.txt +++ b/tests/manual/CMakeLists.txt @@ -41,7 +41,4 @@ foreach(test_cpp ${test_type_cpps}) # dynamic lib target_link_libraries(${target_name} dl) - # register test - add_test(${target_name} ${exec_name}) - endforeach() From b4c65d9680601e628da00c4fbf4c89fdae5a8932 Mon Sep 17 00:00:00 2001 From: sale Date: Fri, 16 Dec 2016 14:49:20 +0000 Subject: [PATCH 09/26] Stacktrace dump method added Summary: Stacktrace dump method added Test Plan: manual Reviewers: buda Subscribers: buda Differential Revision: https://memgraph.phacility.com/D19 --- include/utils/stacktrace.hpp | 16 ++++++++++++++++ include/utils/terminate_handler.hpp | 9 +-------- src/memgraph_bolt.cpp | 6 +----- 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/include/utils/stacktrace.hpp b/include/utils/stacktrace.hpp index b5476eb16..b097a8000 100644 --- a/include/utils/stacktrace.hpp +++ b/include/utils/stacktrace.hpp @@ -58,6 +58,22 @@ public: return lines.size(); } + void dump(std::ostream& stream) { + std::string message; + for (int i = 0; i < size(); i++) { + message.append(fmt::format("at {} ({})\n", lines[i].function, + lines[i].location)); + } + stream << message; + } + + void dump(std::string& message) { + for (int i = 0; i < size(); i++) { + message.append(fmt::format("at {} ({}) \n", lines[i].function, + lines[i].location)); + } + } + private: std::vector lines; diff --git a/include/utils/terminate_handler.hpp b/include/utils/terminate_handler.hpp index 7ae21d5a1..467522e44 100644 --- a/include/utils/terminate_handler.hpp +++ b/include/utils/terminate_handler.hpp @@ -9,14 +9,7 @@ // TODO: log to local file or remote database void stacktrace(std::ostream& stream) noexcept { Stacktrace stacktrace; - - std::string message; - - for (int i = 0; i < stacktrace.size(); i++) { - message.append(fmt::format("\n at {} ({})", stacktrace[i].function, - stacktrace[i].location)); - } - stream << message << std::endl; + stacktrace.dump(stream); } // TODO: log to local file or remote database diff --git a/src/memgraph_bolt.cpp b/src/memgraph_bolt.cpp index 03fcb2e51..aaf9179d6 100644 --- a/src/memgraph_bolt.cpp +++ 
b/src/memgraph_bolt.cpp @@ -22,11 +22,7 @@ static constexpr const char* port = "7687"; void throw_and_stacktace(std::string message) { Stacktrace stacktrace; - - for (int i = 0; i < stacktrace.size(); i++) - message.append(fmt::format("\n at {} ({})", stacktrace[i].function, - stacktrace[i].location)); - + stacktrace.dump(message); logger.info(message); } From fcc1642cadd7827b8110c1be7eda64f62c4a22d4 Mon Sep 17 00:00:00 2001 From: sale Date: Fri, 16 Dec 2016 16:10:29 +0000 Subject: [PATCH 10/26] Final signal handler test and stacktrace update --- include/utils/stacktrace.hpp | 5 +---- tests/unit/signal_handler.cpp | 6 +----- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/include/utils/stacktrace.hpp b/include/utils/stacktrace.hpp index b097a8000..df0b8a829 100644 --- a/include/utils/stacktrace.hpp +++ b/include/utils/stacktrace.hpp @@ -60,10 +60,7 @@ public: void dump(std::ostream& stream) { std::string message; - for (int i = 0; i < size(); i++) { - message.append(fmt::format("at {} ({})\n", lines[i].function, - lines[i].location)); - } + dump(message); stream << message; } diff --git a/tests/unit/signal_handler.cpp b/tests/unit/signal_handler.cpp index d5e83a8d8..e38d61f67 100644 --- a/tests/unit/signal_handler.cpp +++ b/tests/unit/signal_handler.cpp @@ -13,12 +13,8 @@ TEST_CASE("SignalHandler Segmentation Fault Test") { std::cout << "Segmentation Fault" << std::endl; Stacktrace stacktrace; - int size = 10; std::string message; - for (int i = 0; i < size; i++) { - message.append(fmt::format("\n at {} ({})", stacktrace[i].function, - stacktrace[i].location)); - } + stacktrace.dump(message); std::cout << message << std::endl; }); From f4455daeb2469b84f1d93b265b23d773dfbc5317 Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Sat, 17 Dec 2016 21:00:32 +0100 Subject: [PATCH 11/26] CMake refactor done - added single test granularity. Fixes T190, T194 --- CMakeLists.txt | 262 ++++++++++++++----------------- cmake/functions.cmake | 29 ++++ poc/CMakeLists.txt | 62 ++++---- tests/CMakeLists.txt | 31 +++- tests/benchmark/CMakeLists.txt | 2 +- tests/concurrent/CMakeLists.txt | 2 +- tests/integration/CMakeLists.txt | 2 +- tests/manual/CMakeLists.txt | 2 +- tests/unit/CMakeLists.txt | 2 +- tests/unit/cypher_traversal.cpp | 2 +- 10 files changed, 205 insertions(+), 191 deletions(-) create mode 100644 cmake/functions.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index 08bb29916..2fdf552c3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,58 +1,46 @@ +# MemGraph CMake configuration + cmake_minimum_required(VERSION 3.1) +# !! IMPORTANT !! 
run ./project_root/init.sh before cmake command +# to download dependencies + +# choose a compiler +# NOTE: must be chosen before use of project() or enable_language() +if (UNIX) +set(CMAKE_C_COMPILER "clang") +set(CMAKE_CXX_COMPILER "clang++") +endif (UNIX) +# ----------------------------------------------------------------------------- + +# set project name # get directory name get_filename_component(project_name ${CMAKE_SOURCE_DIR} NAME) # replace whitespaces with underscores string(REPLACE " " "_" project_name ${project_name}) # set project name project(${project_name}) +# ----------------------------------------------------------------------------- # setup CMake module path, defines path for include() and find_package() # https://cmake.org/cmake/help/latest/variable/CMAKE_MODULE_PATH.html set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR}/cmake) +# ----------------------------------------------------------------------------- +# custom function definitions +include(functions) +# ----------------------------------------------------------------------------- + +# threading find_package(Threads REQUIRED) - -# flags +# ----------------------------------------------------------------------------- # c++14 set(cxx_standard 14) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++1y") - -# functions - -# prints all included directories -function(list_includes) - get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - PROPERTY INCLUDE_DIRECTORIES) - foreach(dir ${dirs}) - message(STATUS "dir='${dir}'") - endforeach() -endfunction(list_includes) - -# get file names from list of file paths -function(get_file_names file_paths file_names) - set(file_names "") - foreach(file_path ${file_paths}) - get_filename_component (file_name ${file_path} NAME_WE) - list(APPEND file_names ${file_name}) - endforeach() - set(file_names "${file_names}" PARENT_SCOPE) -endfunction() - -MACRO(SUBDIRLIST result curdir) - FILE(GLOB children RELATIVE ${curdir} ${curdir}/*) - SET(dirlist "") - FOREACH(child ${children}) - IF(IS_DIRECTORY ${curdir}/${child}) - LIST(APPEND dirlist ${child}) - ENDIF() - ENDFOREACH() - SET(${result} ${dirlist}) -ENDMACRO() +# ----------------------------------------------------------------------------- # custom targets - # move test data to the build directory if (UNIX) set(test_data "tests/data") @@ -63,21 +51,18 @@ if (UNIX) COMMAND cp -r ${test_data_src} ${test_data_dst} ) endif (UNIX) +# ----------------------------------------------------------------------------- -# external dependencies - +# dir variables set(src_dir ${CMAKE_SOURCE_DIR}/src) set(libs_dir ${CMAKE_SOURCE_DIR}/libs) set(include_dir ${CMAKE_SOURCE_DIR}/include) set(build_include_dir ${CMAKE_BINARY_DIR}/include) set(test_include_dir ${CMAKE_BINARY_DIR}/tests/include) set(test_src_dir ${CMAKE_BINARY_DIR}/tests/src) +# ----------------------------------------------------------------------------- # setup external dependencies - -# !! IMPORTANT !! 
run ./libs/setup.sh before cmake command -# TODO: run from execute_process - # lemon & lempar set(lemon_dir ${libs_dir}/lemon) # lexertl @@ -91,14 +76,17 @@ set(yaml_include_dir ${yaml_source_dir}/include) set(yaml_static_lib ${yaml_source_dir}/libyaml-cpp.a) # Catch (C++ Automated Test Cases in Headers) set(catch_source_dir "${libs_dir}/Catch") +# ----------------------------------------------------------------------------- # load cmake modules: cmake/*.cmake include(gtest) include(gbenchmark) +# ----------------------------------------------------------------------------- # build memgraph's cypher grammar # copy grammar file to the build directory -FILE(COPY ${include_dir}/query/language/cypher/cypher.y DESTINATION ${CMAKE_BINARY_DIR}) +FILE(COPY ${include_dir}/query/language/cypher/cypher.y + DESTINATION ${CMAKE_BINARY_DIR}) # build cypher parser (only c file - cypher.c) EXECUTE_PROCESS( COMMAND ${lemon_dir}/lemon ${CMAKE_BINARY_DIR}/cypher.y -s @@ -110,34 +98,22 @@ FILE(RENAME ${CMAKE_BINARY_DIR}/cypher.c ${CMAKE_BINARY_DIR}/cypher.cpp) SET(cypher_build_include_dir ${build_include_dir}/cypher) FILE(MAKE_DIRECTORY ${cypher_build_include_dir}) FILE(RENAME ${CMAKE_BINARY_DIR}/cypher.h ${cypher_build_include_dir}/cypher.h) +# ----------------------------------------------------------------------------- # prepare template and destination folders for query engine (tests) # and memgraph server binary # copy query_engine's templates file -FILE(COPY ${src_dir}/query_engine/template DESTINATION ${CMAKE_BINARY_DIR}/tests) +FILE(COPY ${src_dir}/query_engine/template + DESTINATION ${CMAKE_BINARY_DIR}/tests) FILE(COPY ${src_dir}/query_engine/template DESTINATION ${CMAKE_BINARY_DIR}) # create destination folder for compiled queries FILE(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/tests/compiled/cpu) FILE(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/compiled/cpu) +# ----------------------------------------------------------------------------- -# TODO: filter header files, all files don't need to be copied -# they are all copied because query engine needs header files during -# query compilation -# TODO: make a function (REMOVE copy pasted part) -# SUBDIRLIST(source_folders ${src_dir}) -# foreach(source_folder ${source_folders}) -# file(COPY ${src_dir}/${source_folder} DESTINATION ${build_include_dir}) -# endforeach() -SUBDIRLIST(source_folders ${src_dir}) -foreach(source_folder ${source_folders}) - file(COPY ${src_dir}/${source_folder} DESTINATION ${test_src_dir}) -endforeach() -SUBDIRLIST(source_folders ${include_dir}) -foreach(source_foler ${source_folders}) - file(COPY ${include_dir}/${source_folder} DESTINATION ${test_include_dir}) -endforeach() - +# copy files needed for query engine (headers) include(copy_includes) +# ----------------------------------------------------------------------------- # linter setup (clang-tidy) # all source files for linting @@ -147,7 +123,6 @@ FILE(GLOB_RECURSE LINTER_SRC_FILES ${CMAKE_SOURCE_DIR}/poc/.cpp ) MESSAGE(STATUS "All cpp files for linting are: ${LINTER_SRC_FILES}") - # linter target clang-tidy find_program(CLANG_TIDY "clang-tidy") if(CLANG_TIDY) @@ -161,24 +136,29 @@ if(CLANG_TIDY) -I${CMAKE_SOURCE_DIR}/include -I${fmt_source_dir} -I${yaml_include_dir} ) endif() -# linter setup +# ----------------------------------------------------------------------------- -# debug flags +# TODO: add specific flags +# release flags +set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG") +#debug flags +set(CMAKE_CXX_FLAGS_DEBUG "-g") + +# compiler specific flags if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL 
"Clang") # set(CMAKE_CXX_FLAGS_DEBUG "-Wl,--export-dynamic ${CMAKE_CXX_FLAGS_DEBUG}") elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") # set(CMAKE_CXX_FLAGS_DEBUG "-rdynamic ${CMAKE_CXX_FLAGS_DEBUG}") endif() -# release flags -set(CMAKE_CXX_FLAGS_RELEASE "-O2 ${CMAKE_CXX_FLAGS_RELEASE}") +# default build type is debug +if ("${CMAKE_BUILD_TYPE}" STREQUAL "") + set(CMAKE_BUILD_TYPE "debug") +endif() +message(STATUS "CMake build type: ${CMAKE_BUILD_TYPE}") +# ----------------------------------------------------------------------------- -#debug flags -set(CMAKE_CXX_FLAGS_DEBUG "-g2 ${CMAKE_CXX_FLAGS_DEBUG}") - -# TODO: find a way how to applay the defines at the query compile time -# -- configure defines -- default is ON | true | enabled ---------------------- -# -- logging ------------------------------------------------------------------ +# logging levels option(LOG_NO_TRACE "Disable trace logging" OFF) message(STATUS "LOG_NO_TRACE: ${LOG_NO_TRACE}") if (LOG_NO_TRACE) @@ -208,15 +188,20 @@ message(STATUS "LOG_NO_ERROR: ${LOG_NO_ERROR}") if (LOG_NO_ERROR) add_definitions(-DLOG_NO_ERROR) endif() -# -- logging ------------------------------------------------------------------ -# -- logger ------------------------------------------------------------------- -option(SYNC_LOGGER "" OFF) -message(STATUS "SYNC LOGGER: ${SYNC_LOGGER}") +# TODO: find a way how to applay those defines at the query compile time +# ----------------------------------------------------------------------------- + +# logger type +# the default logger is sync logger +# on: cmake ... -DSYNC_LOGGER=OFF ... async logger is going to be used +option(SYNC_LOGGER "Sync logger" ON) +message(STATUS "SYNC_LOGGER: ${SYNC_LOGGER}") if (SYNC_LOGGER) add_definitions(-DSYNC_LOGGER) endif() -# -- logger ------------------------------------------------------------------- -# -- assert ------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + +# assert option(RUNTIME_ASSERT "Enable runtime assertions" ON) message(STATUS "RUNTIME_ASSERT: ${RUNTIME_ASSERT}") if(RUNTIME_ASSERT) @@ -228,49 +213,52 @@ message(STATUS "THROW_EXCEPTION_ON_ERROR: ${THROW_EXCEPTION_ON_ERROR}") if(THROW_EXCEPTION_ON_ERROR) add_definitions(-DTHROW_EXCEPTION_ON_ERROR) endif() -# -- assert ------------------------------------------------------------------- -# -- ndebug ------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + +# ndebug option(NDEBUG "No debug" OFF) -message(STATUS "NDEBUG: ${NDEBUG} (be careful CMAKE_BUILD_TYPE can also append this flag)") +message(STATUS "NDEBUG: ${NDEBUG} (be careful CMAKE_BUILD_TYPE can also \ +append this flag)") if(NDEBUG) add_definitions( -DNDEBUG ) endif() -# -- ndebug ------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + # -- GLIBCXX_DEBUG ------------------------------------------------------------ # glibcxx debug (useful for gdb) # the problem is that the query engine doesn't work as it should work if -# this flag is present +# this flag is present (TODO: figure out why) option(GLIBCXX_DEBUG "glibc debug" OFF) -message(STATUS "GLIBCXX_DEBUG: ${GLIBCXX_DEBUG} (solves problem with _M_dataplus member during a debugging process") +message(STATUS "GLIBCXX_DEBUG: ${GLIBCXX_DEBUG} (solves problem with \ +_M_dataplus member during a debugging process)") 
if(GLIBCXX_DEBUG) set(CMAKE_CXX_FLAGS_DEBUG "-D_GLIBCXX_DEBUG ${CMAKE_CXX_FLAGS_DEBUG}") endif() # ----------------------------------------------------------------------------- -# -- binaries ----------------------------------------------------------------- + +# option binaries +# memgraph option(MEMGRAPH "Build memgraph binary" ON) message(STATUS "MEMGRAPH binary: ${MEMGRAPH}") +# proof of concept option(POC "Build proof of concept binaries" ON) message(STATUS "POC binaries: ${POC}") -option(TOOLS "Build tool executables" ON) -message(STATUS "TOOLS binaries: ${TOOLS}") +# tests +option(ALL_TESTS "Add all test binaries" ON) +message(STATUS "Add all test binaries: ${ALL_TESTS}") +option(BENCHMARK_TESTS "Add benchmark test binaries" OFF) +message(STATUS "Add benchmark test binaries: ${BENCHMARK_TESTS}") +option(CONCURRENT_TESTS "Add concurrent test binaries" OFF) +message(STATUS "Add concurrent test binaries: ${CONCURRENT_TESTS}") +option(INTEGRATION_TESTS "Add integration test binaries" OFF) +message(STATUS "Add integration test binaries: ${INTEGRATION_TESTS}") +option(MANUAL_TESTS "Add manual test binaries" OFF) +message(STATUS "Add manual test binaries: ${MANUAL_TESTS}") +option(UNIT_TESTS "Add unit test binaries" OFF) +message(STATUS "Add unit test binaries: ${UNIT_TESTS}") +# ----------------------------------------------------------------------------- -option(TESTS "Build test binaries" ON) -message(STATUS "TESTS binaries: ${TESTS}") - -option(BENCHMARK "Build benchmark test binaries" ON) -message(STATUS "BENCHMARK test binaries: ${BENCHMARK}") -option(CONCURRENT "Build concurrent test binaries" ON) -message(STATUS "CONCURRENT test binaries: ${CONCURRENT}") -option(INTEGRATION "Build integration test binaries" ON) -message(STATUS "INTEGRATION test binaries: ${INTEGRATION}") -option(MANUAL "Build manual test binaries" ON) -message(STATUS "MANUAL test binaries: ${MANUAL}") -option(UNIT "Build unit test binaries" ON) -message(STATUS "UNIT test binaries: ${UNIT}") -# -- binaries ----------------------------------------------------------------- -# -- configure defines -------------------------------------------------------- - -# -- includes ----------------------------------------------------------------- +# includes include_directories(${CMAKE_SOURCE_DIR}/include) include_directories(${src_dir}) include_directories(${build_include_dir}) @@ -285,14 +273,17 @@ include_directories(${r3_source_dir}/include) # creates build/libcypher_lib.a add_library(cypher_lib STATIC ${CMAKE_BINARY_DIR}/cypher.cpp) +# ----------------------------------------------------------------------------- -# REST API preprocessor -EXECUTE_PROCESS( - COMMAND python link_resources.py - WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/src/api -) +# TODO: remove from here (isolate HTTP server) +# # REST API preprocessor +# EXECUTE_PROCESS( +# COMMAND python link_resources.py +# WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/src/api +# ) +# # --------------------------------------------------------------------------- -# TODO: create separate static library from bolt code +# all memgraph src files set(memgraph_src_files ${src_dir}/config/config.cpp ${src_dir}/dbms/dbms.cpp @@ -364,53 +355,29 @@ set(memgraph_src_files ${src_dir}/storage/edge_accessor.cpp ${src_dir}/storage/record_accessor.cpp ) +# ----------------------------------------------------------------------------- # STATIC library used by memgraph executables -add_library(memgraph STATIC ${memgraph_src_files}) +add_library(memgraph_lib STATIC ${memgraph_src_files}) +# 
----------------------------------------------------------------------------- # STATIC PIC library used by query engine add_library(memgraph_pic STATIC ${memgraph_src_files}) set_property(TARGET memgraph_pic PROPERTY POSITION_INDEPENDENT_CODE TRUE) - -# TODO: test build & run logic T190 - -include_directories(${catch_source_dir}/include) - -# tests -if (TESTS) - enable_testing() - add_subdirectory(tests) -endif() +# ----------------------------------------------------------------------------- # proof of concepts if (POC) add_subdirectory(poc) endif() +# ----------------------------------------------------------------------------- -# benchmark test binaries -if (BENCHMARK) - add_subdirectory(${PROJECT_SOURCE_DIR}/tests/benchmark) -endif() - -# concurrent test binaries -if (CONCURRENT) - add_subdirectory(${PROJECT_SOURCE_DIR}/tests/concurrent) -endif() - -# integration test binaries -if (INTEGRATION) - add_subdirectory(${PROJECT_SOURCE_DIR}/tests/integration) -endif() - -# integration test binaries -if (MANUAL) - add_subdirectory(${PROJECT_SOURCE_DIR}/tests/manual) -endif() - -# integration test binaries -if (UNIT) - add_subdirectory(${PROJECT_SOURCE_DIR}/tests/unit) +# tests +if (ALL_TESTS OR BENCHMARK_TESTS OR CONCURRENT_TEST OR INTEGRATION_TEST + OR MANUAL_TESTS OR UNIT_TESTS) + add_subdirectory(tests) endif() +# ----------------------------------------------------------------------------- # memgraph build name execute_process( @@ -430,19 +397,17 @@ string(STRIP ${COMMIT_NO} COMMIT_NO) string(STRIP ${COMMIT_HASH} COMMIT_HASH) set(MEMGRAPH_BUILD_NAME "memgraph_${COMMIT_NO}_${COMMIT_HASH}_${COMMIT_BRANCH}_${CMAKE_BUILD_TYPE}") - -message(STATUS "CMake build type: ${CMAKE_BUILD_TYPE}") -message(STATUS "Debug flags: ${CMAKE_CXX_FLAGS_DEBUG}") -message(STATUS "Release flags: ${CMAKE_CXX_FLAGS_RELEASE}") +# ----------------------------------------------------------------------------- # memgraph main executable if (MEMGRAPH) add_executable(${MEMGRAPH_BUILD_NAME} ${src_dir}/memgraph_bolt.cpp) - target_link_libraries(${MEMGRAPH_BUILD_NAME} memgraph) + target_link_libraries(${MEMGRAPH_BUILD_NAME} memgraph_lib) target_link_libraries(${MEMGRAPH_BUILD_NAME} stdc++fs) target_link_libraries(${MEMGRAPH_BUILD_NAME} Threads::Threads) target_link_libraries(${MEMGRAPH_BUILD_NAME} cypher_lib) + if (UNIX) target_link_libraries(${MEMGRAPH_BUILD_NAME} crypto) # target_link_libraries(${MEMGRAPH_BUILD_NAME} ssl) @@ -451,3 +416,4 @@ if (MEMGRAPH) target_link_libraries(${MEMGRAPH_BUILD_NAME} dl) endif (UNIX) endif() +# ----------------------------------------------------------------------------- diff --git a/cmake/functions.cmake b/cmake/functions.cmake new file mode 100644 index 000000000..109db7944 --- /dev/null +++ b/cmake/functions.cmake @@ -0,0 +1,29 @@ +# prints all included directories +function(list_includes) + get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + PROPERTY INCLUDE_DIRECTORIES) + foreach(dir ${dirs}) + message(STATUS "dir='${dir}'") + endforeach() +endfunction(list_includes) + +# get file names from list of file paths +function(get_file_names file_paths file_names) + set(file_names "") + foreach(file_path ${file_paths}) + get_filename_component (file_name ${file_path} NAME_WE) + list(APPEND file_names ${file_name}) + endforeach() + set(file_names "${file_names}" PARENT_SCOPE) +endfunction() + +MACRO(SUBDIRLIST result curdir) + FILE(GLOB children RELATIVE ${curdir} ${curdir}/*) + SET(dirlist "") + FOREACH(child ${children}) + IF(IS_DIRECTORY ${curdir}/${child}) + LIST(APPEND dirlist 
${child}) + ENDIF() + ENDFOREACH() + SET(${result} ${dirlist}) +ENDMACRO() diff --git a/poc/CMakeLists.txt b/poc/CMakeLists.txt index ccad1fadf..f486a7a49 100644 --- a/poc/CMakeLists.txt +++ b/poc/CMakeLists.txt @@ -1,45 +1,37 @@ -cmake_minimum_required(VERSION 3.1) +find_package(Threads REQUIRED) -project(memgraph_poc) +# get all cpp abs file names recursively starting from current directory +file(GLOB poc_cpps *.cpp) +message(STATUS "Available poc cpp files are: ${poc_cpps}") include_directories(${CMAKE_SOURCE_DIR}/poc) -include_directories(${CMAKE_SOURCE_DIR}/queries) -add_executable(poc_astar astar.cpp) -target_link_libraries(poc_astar memgraph) -target_link_libraries(poc_astar Threads::Threads) -target_link_libraries(poc_astar ${fmt_static_lib}) -target_link_libraries(poc_astar ${yaml_static_lib}) +# for each cpp file build binary +foreach(poc_cpp ${poc_cpps}) -add_executable(powerlinx_profile profile.cpp) -target_link_libraries(powerlinx_profile memgraph) -target_link_libraries(powerlinx_profile Threads::Threads) -target_link_libraries(powerlinx_profile ${fmt_static_lib}) -target_link_libraries(powerlinx_profile ${yaml_static_lib}) + # get exec name (remove extension from the abs path) + get_filename_component(exec_name ${poc_cpp} NAME_WE) -add_executable(csv_import csv_import.cpp) -target_link_libraries(csv_import memgraph) -target_link_libraries(csv_import Threads::Threads) -target_link_libraries(csv_import ${fmt_static_lib}) -target_link_libraries(csv_import ${yaml_static_lib}) + # set target name in format {project_name}_{test_type}_{exec_name} + set(target_name ${project_name}_poc_${exec_name}) -add_executable(add_double add_double.cpp) -target_link_libraries(add_double memgraph) -target_link_libraries(add_double Threads::Threads) -target_link_libraries(add_double ${fmt_static_lib}) -target_link_libraries(add_double ${yaml_static_lib}) + # build exec file + add_executable(${target_name} ${poc_cpp} isolation/header.cpp) + set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard}) -add_executable(astar_query astar_query.cpp) -target_link_libraries(astar_query memgraph) -target_link_libraries(astar_query Threads::Threads) -target_link_libraries(astar_query ${fmt_static_lib}) -target_link_libraries(astar_query ${yaml_static_lib}) + # OUTPUT_NAME sets the real name of a target when it is built and can be + # used to help create two targets of the same name even though CMake + # requires unique logical target names + set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name}) -add_executable(size_aligment size_aligment.cpp) -target_link_libraries(size_aligment memgraph) -target_link_libraries(size_aligment Threads::Threads) -target_link_libraries(size_aligment ${fmt_static_lib}) -target_link_libraries(size_aligment ${yaml_static_lib}) + # link libraries + # threads (cross-platform) + target_link_libraries(${target_name} Threads::Threads) + # memgraph lib + target_link_libraries(${target_name} memgraph_lib) + # fmt format lib + target_link_libraries(${target_name} ${fmt_static_lib}) + # yaml parser lib + target_link_libraries(${target_name} ${yaml_static_lib}) -add_executable(isolation isolation.cpp isolation/header.cpp) -target_link_libraries(isolation ${fmt_static_lib}) +endforeach() diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 0daa427fa..8e243cf8d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -2,10 +2,37 @@ cmake_minimum_required(VERSION 3.1) project(${project_name}_tests) -set(src_dir ${CMAKE_SOURCE_DIR}/src) 
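The test binaries resolve this copied data through paths relative to their working directory, which is why the query-data path keeps moving in these patches. As a hedged sketch of the consuming side, modeled on the load_queries helper touched further below (the directory and file extension come from the diffs; everything else is illustrative):

    #include <experimental/filesystem>
    #include <fstream>
    #include <iterator>
    #include <string>
    #include <vector>

    namespace fs = std::experimental::filesystem;

    // Illustrative: collect the text of every *.cypher file from the test
    // data directory, resolved relative to the test binary's working dir.
    std::vector<std::string> load_queries()
    {
        std::vector<std::string> queries;
        fs::path queries_path = "data/queries/cypher";
        for (auto &directory_entry : fs::directory_iterator(queries_path))
        {
            if (directory_entry.path().extension() != ".cypher") continue;
            std::ifstream file(directory_entry.path().string());
            queries.emplace_back(std::istreambuf_iterator<char>(file),
                                 std::istreambuf_iterator<char>());
        }
        return queries;
    }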
+enable_testing() + +include_directories(${catch_source_dir}/include) # copy unit test data file(COPY ${CMAKE_SOURCE_DIR}/tests/data DESTINATION ${CMAKE_BINARY_DIR}/tests) +file(COPY ${CMAKE_SOURCE_DIR}/tests/data + DESTINATION ${CMAKE_BINARY_DIR}/tests/unit) -# TODO: test logic here T190 +# benchmark test binaries +if (ALL_TESTS OR BENCHMARK_TESTS) + add_subdirectory(${PROJECT_SOURCE_DIR}/benchmark) +endif() + +# concurrent test binaries +if (ALL_TESTS OR CONCURRENT_TESTS) + add_subdirectory(${PROJECT_SOURCE_DIR}/concurrent) +endif() + +# integration test binaries +if (ALL_TESTS OR INTEGRATION_TESTS) + add_subdirectory(${PROJECT_SOURCE_DIR}/integration) +endif() + +# manual test binaries +if (ALL_TESTS OR MANUAL_TESTS) + add_subdirectory(${PROJECT_SOURCE_DIR}/manual) +endif() + +# unit test binaries +if (ALL_TESTS OR UNIT_TESTS) + add_subdirectory(${PROJECT_SOURCE_DIR}/unit) +endif() diff --git a/tests/benchmark/CMakeLists.txt b/tests/benchmark/CMakeLists.txt index 6d566fc50..f0224f4c2 100644 --- a/tests/benchmark/CMakeLists.txt +++ b/tests/benchmark/CMakeLists.txt @@ -31,7 +31,7 @@ foreach(test_cpp ${test_type_cpps}) # google-benchmark target_link_libraries(${target_name} benchmark ${CMAKE_THREAD_LIBS_INIT}) # memgraph lib - target_link_libraries(${target_name} memgraph) + target_link_libraries(${target_name} memgraph_lib) # fmt format lib target_link_libraries(${target_name} ${fmt_static_lib}) # yaml parser lib diff --git a/tests/concurrent/CMakeLists.txt b/tests/concurrent/CMakeLists.txt index 7123a7072..16c257500 100644 --- a/tests/concurrent/CMakeLists.txt +++ b/tests/concurrent/CMakeLists.txt @@ -29,7 +29,7 @@ foreach(test_cpp ${test_type_cpps}) # threads (cross-platform) target_link_libraries(${target_name} Threads::Threads) # memgraph lib - target_link_libraries(${target_name} memgraph) + target_link_libraries(${target_name} memgraph_lib) # fmt format lib target_link_libraries(${target_name} ${fmt_static_lib}) # yaml parser lib diff --git a/tests/integration/CMakeLists.txt b/tests/integration/CMakeLists.txt index 99432b169..d66e52b4c 100644 --- a/tests/integration/CMakeLists.txt +++ b/tests/integration/CMakeLists.txt @@ -31,7 +31,7 @@ foreach(test_cpp ${test_type_cpps}) # threads (cross-platform) target_link_libraries(${target_name} Threads::Threads) # memgraph lib - target_link_libraries(${target_name} memgraph) + target_link_libraries(${target_name} memgraph_lib) # fmt format lib target_link_libraries(${target_name} ${fmt_static_lib}) # yaml parser lib diff --git a/tests/manual/CMakeLists.txt b/tests/manual/CMakeLists.txt index 7a48747db..df6ecb100 100644 --- a/tests/manual/CMakeLists.txt +++ b/tests/manual/CMakeLists.txt @@ -31,7 +31,7 @@ foreach(test_cpp ${test_type_cpps}) # threads (cross-platform) target_link_libraries(${target_name} Threads::Threads) # memgraph lib - target_link_libraries(${target_name} memgraph) + target_link_libraries(${target_name} memgraph_lib) # fmt format lib target_link_libraries(${target_name} ${fmt_static_lib}) # yaml parser lib diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 7a48747db..df6ecb100 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -31,7 +31,7 @@ foreach(test_cpp ${test_type_cpps}) # threads (cross-platform) target_link_libraries(${target_name} Threads::Threads) # memgraph lib - target_link_libraries(${target_name} memgraph) + target_link_libraries(${target_name} memgraph_lib) # fmt format lib target_link_libraries(${target_name} ${fmt_static_lib}) # yaml parser lib diff --git 
a/tests/unit/cypher_traversal.cpp b/tests/unit/cypher_traversal.cpp index 1b94d129c..16123d92b 100644 --- a/tests/unit/cypher_traversal.cpp +++ b/tests/unit/cypher_traversal.cpp @@ -19,7 +19,7 @@ auto load_queries() { std::vector queries; - fs::path queries_path = "data/cypher_queries"; + fs::path queries_path = "data/queries/cypher"; std::string query_file_extension = "cypher"; for (auto& directory_entry : From 948694fd27ff14ce7fce3f3e8746b8a44383d2bb Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Sun, 18 Dec 2016 10:16:07 +0100 Subject: [PATCH 12/26] tests data bug fix - too many copies of tests/data --- CMakeLists.txt | 13 ------------- tests/CMakeLists.txt | 16 +++++++++++++--- tests/unit/cypher_traversal.cpp | 2 +- 3 files changed, 14 insertions(+), 17 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2fdf552c3..9f6c9eb5c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -40,19 +40,6 @@ set(cxx_standard 14) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++1y") # ----------------------------------------------------------------------------- -# custom targets -# move test data data to the build directory -if (UNIX) - set(test_data "tests/data") - set(test_data_src "${CMAKE_SOURCE_DIR}/${test_data}") - set(test_data_dst "${CMAKE_BINARY_DIR}/${test_data}") - add_custom_target (test_data - COMMAND rm -rf ${test_data_dst} - COMMAND cp -r ${test_data_src} ${test_data_dst} - ) -endif (UNIX) -# ----------------------------------------------------------------------------- - # dir variables set(src_dir ${CMAKE_SOURCE_DIR}/src) set(libs_dir ${CMAKE_SOURCE_DIR}/libs) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 8e243cf8d..a83e7954d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -6,11 +6,21 @@ enable_testing() include_directories(${catch_source_dir}/include) -# copy unit test data +# copy test data file(COPY ${CMAKE_SOURCE_DIR}/tests/data DESTINATION ${CMAKE_BINARY_DIR}/tests) -file(COPY ${CMAKE_SOURCE_DIR}/tests/data - DESTINATION ${CMAKE_BINARY_DIR}/tests/unit) + +# move test data data to the build directory +if (UNIX) + set(test_data "tests/data") + set(test_data_src "${CMAKE_SOURCE_DIR}/${test_data}") + set(test_data_dst "${CMAKE_BINARY_DIR}/${test_data}") + add_custom_target (test_data + COMMAND rm -rf ${test_data_dst} + COMMAND cp -r ${test_data_src} ${test_data_dst} + ) +endif (UNIX) +# ----------------------------------------------------------------------------- # benchmark test binaries if (ALL_TESTS OR BENCHMARK_TESTS) diff --git a/tests/unit/cypher_traversal.cpp b/tests/unit/cypher_traversal.cpp index 16123d92b..85c1ba529 100644 --- a/tests/unit/cypher_traversal.cpp +++ b/tests/unit/cypher_traversal.cpp @@ -19,7 +19,7 @@ auto load_queries() { std::vector queries; - fs::path queries_path = "data/queries/cypher"; + fs::path queries_path = "../data/queries/cypher"; std::string query_file_extension = "cypher"; for (auto& directory_entry : From 4d6c315c1e943642bd639c6647e4d76baa418aee Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Sun, 18 Dec 2016 19:21:29 +0100 Subject: [PATCH 13/26] Block Allocator Test - initial implementation Summary: Block Allocator Test - initial implementation Test Plan: ctest -R memgraph_unit_block_allocator Reviewers: sale Subscribers: sale, buda Differential Revision: https://memgraph.phacility.com/D20 --- include/utils/auto_scope.hpp | 7 ++ include/utils/memory/block_allocator.hpp | 39 ++++++--- tests/unit/CMakeLists.txt | 2 + tests/unit/basic_bloom_filter.cpp | 47 +++++----- 
tests/unit/block_allocator.cpp | 24 +++++ tests/unit/parameter_index.cpp | 5 +- tests/unit/program_argument.cpp | 106 ++++++++++++----------- 7 files changed, 142 insertions(+), 88 deletions(-) create mode 100644 tests/unit/block_allocator.cpp diff --git a/include/utils/auto_scope.hpp b/include/utils/auto_scope.hpp index 10de5751f..119078a91 100644 --- a/include/utils/auto_scope.hpp +++ b/include/utils/auto_scope.hpp @@ -55,3 +55,10 @@ private: TOKEN_PASTE(auto_, counter)(TOKEN_PASTE(auto_func_, counter)); #define Auto(Destructor) Auto_INTERNAL(Destructor, __COUNTER__) + +// -- example: +// Auto(f()); +// -- is expanded to: +// auto auto_func_1 = [&]() { f(); }; +// OnScopeExit auto_1(auto_func_1); +// -- f() is called at the end of the scope diff --git a/include/utils/memory/block_allocator.hpp b/include/utils/memory/block_allocator.hpp index f0c0e6475..f7eb3791f 100644 --- a/include/utils/memory/block_allocator.hpp +++ b/include/utils/memory/block_allocator.hpp @@ -5,6 +5,9 @@ #include "utils/auto_scope.hpp" +/** @brief Allocates blocks of block_size and stores + * pointers to the allocated blocks inside a vector. + */ template <size_t block_size> class BlockAllocator { @@ -23,29 +26,45 @@ public: BlockAllocator(size_t capacity = 0) { for (size_t i = 0; i < capacity; ++i) - blocks.emplace_back(); + unused_.emplace_back(); } ~BlockAllocator() { - for (auto b : blocks) { - free(b.data); - } - blocks.clear(); + for (auto block : unused_) + free(block.data); + unused_.clear(); + for (auto block : release_) + free(block.data); + release_.clear(); + } + + size_t unused_size() const + { + return unused_.size(); + } + + size_t release_size() const + { + return release_.size(); } // Returns nullptr on no memory. void *acquire() { - if (blocks.size() == 0) blocks.emplace_back(); + if (unused_.size() == 0) unused_.emplace_back(); - auto ptr = blocks.back().data; - Auto(blocks.pop_back()); + auto ptr = unused_.back().data; + Auto(unused_.pop_back()); return ptr; } - void release(void *ptr) { blocks.emplace_back(ptr); } + void release(void *ptr) { release_.emplace_back(ptr); } private: - std::vector<Block> blocks; + // TODO: try to implement with just one vector, + // but consecutive acquire/release calls should still work + // TODO: measure first!
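// -- usage sketch (illustrative only; mirrors the unit test added below):
//   BlockAllocator<64> allocator(10);  // pre-fills unused_ with ten 64-byte blocks
//   void *block = allocator.acquire(); // pops a block from unused_ (allocates one if empty)
//   allocator.release(block);          // parks the pointer on release_
// -- at this point: unused_size() == 9 and release_size() == 1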
+ std::vector unused_; + std::vector release_; }; diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index df6ecb100..85b360288 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -26,6 +26,8 @@ foreach(test_cpp ${test_type_cpps}) set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name}) # link libraries + # gtest + target_link_libraries(${target_name} gtest gtest_main) # filesystem target_link_libraries(${target_name} stdc++fs) # threads (cross-platform) diff --git a/tests/unit/basic_bloom_filter.cpp b/tests/unit/basic_bloom_filter.cpp index ac4df7fc2..15a41294c 100644 --- a/tests/unit/basic_bloom_filter.cpp +++ b/tests/unit/basic_bloom_filter.cpp @@ -9,37 +9,34 @@ #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wwritable-strings" -using StringHashFunction = std::function; - -TEST_CASE("BloomFilter Test") { - StringHashFunction hash1 = fnv64; - StringHashFunction hash2 = fnv1a64; +using StringHashFunction = std::function; - auto c = [](auto x) -> int { - return x % 4; - } ; - std::vector funcs = { - hash1, hash2 - }; +TEST_CASE("BloomFilter Test") +{ + StringHashFunction hash1 = fnv64; + StringHashFunction hash2 = fnv1a64; - BloomFilter bloom(funcs); + auto c = [](auto x) -> int { return x % 4; }; + std::vector funcs = {hash1, hash2}; - std::string test = "test"; - std::string kifla = "kifla"; + BloomFilter bloom(funcs); - std::cout << hash1(test) << std::endl; - std::cout << hash2(test) << std::endl; - - std::cout << hash1(kifla) << std::endl; - std::cout << hash2(kifla) << std::endl; + std::string test = "test"; + std::string kifla = "kifla"; - std::cout << bloom.contains(test) << std::endl; - bloom.insert(test); - std::cout << bloom.contains(test) << std::endl; + std::cout << hash1(test) << std::endl; + std::cout << hash2(test) << std::endl; - std::cout << bloom.contains(kifla) << std::endl; - bloom.insert(kifla); - std::cout << bloom.contains(kifla) << std::endl; + std::cout << hash1(kifla) << std::endl; + std::cout << hash2(kifla) << std::endl; + + std::cout << bloom.contains(test) << std::endl; + bloom.insert(test); + std::cout << bloom.contains(test) << std::endl; + + std::cout << bloom.contains(kifla) << std::endl; + bloom.insert(kifla); + std::cout << bloom.contains(kifla) << std::endl; } #pragma clang diagnostic pop diff --git a/tests/unit/block_allocator.cpp b/tests/unit/block_allocator.cpp new file mode 100644 index 000000000..35bf9cfdc --- /dev/null +++ b/tests/unit/block_allocator.cpp @@ -0,0 +1,24 @@ +#include "gtest/gtest.h" + +#include "utils/memory/block_allocator.hpp" + +TEST(BlockAllocatorTest, UnusedVsReleaseSize) +{ + BlockAllocator<64> block_allocator(10); + void *block = block_allocator.acquire(); + block_allocator.release(block); + EXPECT_EQ(block_allocator.unused_size(), 9); + EXPECT_EQ(block_allocator.release_size(), 1); +} + +TEST(BlockAllocatorTest, CountMallocAndFreeCalls) +{ + // TODO: implementation + EXPECT_EQ(true, true); +} + +int main(int argc, char **argv) +{ + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/tests/unit/parameter_index.cpp b/tests/unit/parameter_index.cpp index 542591dd6..b2d137c79 100644 --- a/tests/unit/parameter_index.cpp +++ b/tests/unit/parameter_index.cpp @@ -8,12 +8,13 @@ using ParameterIndexKey::Type::Projection; auto main() -> int { - std::map parameter_index; + std::map parameter_index; parameter_index[ParameterIndexKey(InternalId, "n1")] = 0; parameter_index[ParameterIndexKey(InternalId, "n2")] = 1; - 
permanent_assert(parameter_index.size() == 2, "Parameter index size should be 2"); + permanent_assert(parameter_index.size() == 2, + "Parameter index size should be 2"); return 0; } diff --git a/tests/unit/program_argument.cpp b/tests/unit/program_argument.cpp index a12ae190f..c5c54996e 100644 --- a/tests/unit/program_argument.cpp +++ b/tests/unit/program_argument.cpp @@ -6,80 +6,84 @@ #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wwritable-strings" -TEST_CASE("ProgramArgument FlagOnly Test") { - CLEAR_ARGS(); +TEST_CASE("ProgramArgument FlagOnly Test") +{ + CLEAR_ARGS(); - int argc = 2; - char* argv[] = {"ProgramArgument FlagOnly Test", "-test"}; + int argc = 2; + char *argv[] = {"ProgramArgument FlagOnly Test", "-test"}; - REGISTER_ARGS(argc, argv); - REGISTER_REQUIRED_ARGS({"-test"}); + REGISTER_ARGS(argc, argv); + REGISTER_REQUIRED_ARGS({"-test"}); - REQUIRE(CONTAINS_FLAG("-test") == true); + REQUIRE(CONTAINS_FLAG("-test") == true); } -TEST_CASE("ProgramArgument Single Entry Test") { - CLEAR_ARGS(); +TEST_CASE("ProgramArgument Single Entry Test") +{ + CLEAR_ARGS(); - int argc = 3; - char* argv[] = {"ProgramArgument Single Entry Test", "-bananas", "99"}; + int argc = 3; + char *argv[] = {"ProgramArgument Single Entry Test", "-bananas", "99"}; - REGISTER_REQUIRED_ARGS({"-bananas"}); - REGISTER_ARGS(argc, argv); + REGISTER_REQUIRED_ARGS({"-bananas"}); + REGISTER_ARGS(argc, argv); - REQUIRE(GET_ARG("-bananas", "100").get_int() == 99); + REQUIRE(GET_ARG("-bananas", "100").get_int() == 99); } -TEST_CASE("ProgramArgument Multiple Entries Test") { - CLEAR_ARGS(); +TEST_CASE("ProgramArgument Multiple Entries Test") +{ + CLEAR_ARGS(); - int argc = 4; - char* argv[] = {"ProgramArgument Multiple Entries Test", "-files", - "first_file.txt", "second_file.txt"}; + int argc = 4; + char *argv[] = {"ProgramArgument Multiple Entries Test", "-files", + "first_file.txt", "second_file.txt"}; - REGISTER_ARGS(argc, argv); + REGISTER_ARGS(argc, argv); - auto files = GET_ARGS("-files", {}); + auto files = GET_ARGS("-files", {}); - REQUIRE(files[0].get_string() == "first_file.txt"); + REQUIRE(files[0].get_string() == "first_file.txt"); } -TEST_CASE("ProgramArgument Combination Test") { - CLEAR_ARGS(); +TEST_CASE("ProgramArgument Combination Test") +{ + CLEAR_ARGS(); - int argc = 14; - char* argv[] = {"ProgramArgument Combination Test", - "-run_tests", - "-tests", - "Test1", - "Test2", - "Test3", - "-run_times", - "10", - "-export", - "test1.txt", - "test2.txt", - "test3.txt", - "-import", - "data.txt"}; + int argc = 14; + char *argv[] = {"ProgramArgument Combination Test", + "-run_tests", + "-tests", + "Test1", + "Test2", + "Test3", + "-run_times", + "10", + "-export", + "test1.txt", + "test2.txt", + "test3.txt", + "-import", + "data.txt"}; - REGISTER_ARGS(argc, argv); + REGISTER_ARGS(argc, argv); - REQUIRE(CONTAINS_FLAG("-run_tests") == true); + REQUIRE(CONTAINS_FLAG("-run_tests") == true); - auto tests = GET_ARGS("-tests", {}); - REQUIRE(tests[0].get_string() == "Test1"); - REQUIRE(tests[1].get_string() == "Test2"); - REQUIRE(tests[2].get_string() == "Test3"); + auto tests = GET_ARGS("-tests", {}); + REQUIRE(tests[0].get_string() == "Test1"); + REQUIRE(tests[1].get_string() == "Test2"); + REQUIRE(tests[2].get_string() == "Test3"); - REQUIRE(GET_ARG("-run_times", "0").get_int() == 10); + REQUIRE(GET_ARG("-run_times", "0").get_int() == 10); - auto exports = GET_ARGS("-export", {}); - REQUIRE(exports[0].get_string() == "test1.txt"); - REQUIRE(exports[1].get_string() == "test2.txt"); - 
REQUIRE(exports[2].get_string() == "test3.txt"); + auto exports = GET_ARGS("-export", {}); + REQUIRE(exports[0].get_string() == "test1.txt"); + REQUIRE(exports[1].get_string() == "test2.txt"); + REQUIRE(exports[2].get_string() == "test3.txt"); - REQUIRE(GET_ARG("-import", "test.txt").get_string() == "data.txt"); + REQUIRE(GET_ARG("-import", "test.txt").get_string() == "data.txt"); } #pragma clang diagnostic pop From dc3433aa8ad283df577fbabe69f736aa6678fce3 Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Sun, 18 Dec 2016 20:26:08 +0100 Subject: [PATCH 14/26] Stack Allocator Unit Test Summary: Stack Allocator Unit Test Test Plan: manual (unit tests are not passing because malloc and free counters have to be added) Reviewers: sale Subscribers: sale, buda Differential Revision: https://memgraph.phacility.com/D21 --- include/utils/memory/stack_allocator.hpp | 1 + tests/unit/block_allocator.cpp | 2 +- tests/unit/stack_allocator.cpp | 34 ++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 tests/unit/stack_allocator.cpp diff --git a/include/utils/memory/stack_allocator.hpp b/include/utils/memory/stack_allocator.hpp index 287bdad6a..15623e49d 100644 --- a/include/utils/memory/stack_allocator.hpp +++ b/include/utils/memory/stack_allocator.hpp @@ -3,6 +3,7 @@ #include #include "utils/exceptions/out_of_memory.hpp" +#include "utils/likely.hpp" #include "utils/memory/block_allocator.hpp" // http://en.cppreference.com/w/cpp/language/new diff --git a/tests/unit/block_allocator.cpp b/tests/unit/block_allocator.cpp index 35bf9cfdc..e2de1e405 100644 --- a/tests/unit/block_allocator.cpp +++ b/tests/unit/block_allocator.cpp @@ -14,7 +14,7 @@ TEST(BlockAllocatorTest, UnusedVsReleaseSize) TEST(BlockAllocatorTest, CountMallocAndFreeCalls) { // TODO: implementation - EXPECT_EQ(true, true); + EXPECT_EQ(true, false); } int main(int argc, char **argv) diff --git a/tests/unit/stack_allocator.cpp b/tests/unit/stack_allocator.cpp new file mode 100644 index 000000000..006ffbe36 --- /dev/null +++ b/tests/unit/stack_allocator.cpp @@ -0,0 +1,34 @@ +#include "gtest/gtest.h" + +#include "utils/memory/stack_allocator.hpp" + +struct Object +{ + int a; + int b; + + Object(int a, int b) : a(a), b(b) {} +}; + +TEST(StackAllocatorTest, AllocationAndObjectValidity) +{ + StackAllocator allocator; + for (int i = 0; i < 64 * 1024; ++i) + { + auto object = allocator.make(1, 2); + ASSERT_EQ(object->a, 1); + ASSERT_EQ(object->b, 2); + } +} + +TEST(StackAllocatorTest, CountMallocAndFreeCalls) +{ + // TODO: implementation + EXPECT_EQ(true, false); +} + +int main(int argc, char **argv) +{ + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} From 18612a3063c644380854d1d148d28bf557c5e6be Mon Sep 17 00:00:00 2001 From: sale Date: Mon, 19 Dec 2016 10:02:24 +0000 Subject: [PATCH 15/26] Added dump methods to stacktrace --- include/utils/stacktrace.hpp | 11 ++++++----- src/memgraph_bolt.cpp | 3 +-- tests/unit/signal_handler.cpp | 6 +----- 3 files changed, 8 insertions(+), 12 deletions(-) diff --git a/include/utils/stacktrace.hpp b/include/utils/stacktrace.hpp index df0b8a829..ce063438a 100644 --- a/include/utils/stacktrace.hpp +++ b/include/utils/stacktrace.hpp @@ -58,17 +58,18 @@ public: return lines.size(); } - void dump(std::ostream& stream) { - std::string message; - dump(message); - stream << message; + template + void dump(Stream& stream) { + stream << dump(); } - void dump(std::string& message) { + std::string dump() { + std::string message; for (int i = 0; i < size(); i++) { 
message.append(fmt::format("at {} ({}) \n", lines[i].function, lines[i].location)); } + return message; } private: diff --git a/src/memgraph_bolt.cpp b/src/memgraph_bolt.cpp index aaf9179d6..1c0585068 100644 --- a/src/memgraph_bolt.cpp +++ b/src/memgraph_bolt.cpp @@ -22,8 +22,7 @@ static constexpr const char* port = "7687"; void throw_and_stacktace(std::string message) { Stacktrace stacktrace; - stacktrace.dump(message); - logger.info(message); + logger.info(stacktrace.dump()); } int main(void) { diff --git a/tests/unit/signal_handler.cpp b/tests/unit/signal_handler.cpp index e38d61f67..12ee95e5a 100644 --- a/tests/unit/signal_handler.cpp +++ b/tests/unit/signal_handler.cpp @@ -12,11 +12,7 @@ TEST_CASE("SignalHandler Segmentation Fault Test") { SignalHandler::register_handler(Signal::SegmentationFault, []() { std::cout << "Segmentation Fault" << std::endl; Stacktrace stacktrace; - - std::string message; - stacktrace.dump(message); - std::cout << message << std::endl; - + std::cout << stacktrace.dump() << std::endl; }); std::raise(SIGSEGV); From 362bc3ba48abf40529111e6aa6b3404d6d5ea2ab Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Mon, 19 Dec 2016 18:32:44 +0100 Subject: [PATCH 16/26] Bug fixes: RELEASE MODE - asserts --- cmake/copy_includes.cmake | 3 +- include/utils/assert.hpp | 5 +- include/utils/exceptions/basic_exception.hpp | 52 +++----- include/utils/signals/handler.hpp | 52 ++++---- include/utils/stacktrace/log.hpp | 11 ++ include/utils/{ => stacktrace}/stacktrace.hpp | 69 +++++------ include/utils/terminate_handler.hpp | 32 +++-- src/memgraph_bolt.cpp | 114 +++++++++--------- src/storage/edge_accessor.cpp | 7 +- tests/integration/cleaning.cpp | 69 ++++++----- tests/integration/index.cpp | 109 +++++++++++------ tests/integration/snapshot.cpp | 72 ++++++----- tests/unit/signal_handler.cpp | 17 +-- 13 files changed, 338 insertions(+), 274 deletions(-) create mode 100644 include/utils/stacktrace/log.hpp rename include/utils/{ => stacktrace}/stacktrace.hpp (52%) diff --git a/cmake/copy_includes.cmake b/cmake/copy_includes.cmake index 4b0ad5a4b..5508be6cd 100644 --- a/cmake/copy_includes.cmake +++ b/cmake/copy_includes.cmake @@ -118,7 +118,8 @@ FILE(COPY ${include_dir}/utils/char_str.hpp DESTINATION ${build_include_dir}/uti FILE(COPY ${include_dir}/utils/void.hpp DESTINATION ${build_include_dir}/utils) FILE(COPY ${include_dir}/utils/array_store.hpp DESTINATION ${build_include_dir}/utils) FILE(COPY ${include_dir}/utils/bswap.hpp DESTINATION ${build_include_dir}/utils) -FILE(COPY ${include_dir}/utils/stacktrace.hpp DESTINATION ${build_include_dir}/utils) +FILE(COPY ${include_dir}/utils/stacktrace/stacktrace.hpp DESTINATION ${build_include_dir}/utils/stacktrace) +FILE(COPY ${include_dir}/utils/stacktrace/log.hpp DESTINATION ${build_include_dir}/utils/stacktrace) FILE(COPY ${include_dir}/utils/auto_scope.hpp DESTINATION ${build_include_dir}/utils) FILE(COPY ${include_dir}/utils/assert.hpp DESTINATION ${build_include_dir}/utils) FILE(COPY ${include_dir}/utils/reference_wrapper.hpp DESTINATION ${build_include_dir}/utils) diff --git a/include/utils/assert.hpp b/include/utils/assert.hpp index 52c4ee49c..b5fec6070 100644 --- a/include/utils/assert.hpp +++ b/include/utils/assert.hpp @@ -25,9 +25,12 @@ // permanent assert will always be executed #define permanent_assert(condition, message) \ - if (!(condition)) { \ + if (!(condition)) \ + { \ std::ostringstream s; \ s << message; \ + std::cout << s.str() << std::endl; \ + std::exit(EXIT_FAILURE); \ } // assert_error_handler_(__FILE__, 
__LINE__, s.str().c_str()); diff --git a/include/utils/exceptions/basic_exception.hpp b/include/utils/exceptions/basic_exception.hpp index 32459dcbf..9a30e854b 100644 --- a/include/utils/exceptions/basic_exception.hpp +++ b/include/utils/exceptions/basic_exception.hpp @@ -4,39 +4,25 @@ #include #include "utils/auto_scope.hpp" -#include "utils/stacktrace.hpp" +#include "utils/stacktrace/stacktrace.hpp" -class BasicException : public std::exception { - public: - BasicException(const std::string &message, uint64_t stacktrace_size) noexcept - : message_(message), - stacktrace_size_(stacktrace_size) { - generate_stacktrace(); - } - BasicException(const std::string &message) noexcept : message_(message), - stacktrace_size_(10) { - generate_stacktrace(); - } - - template - BasicException(const std::string &format, Args &&... args) noexcept - : BasicException(fmt::format(format, std::forward(args)...)) {} - - const char *what() const noexcept override { return message_.c_str(); } - - private: - std::string message_; - uint64_t stacktrace_size_; - - void generate_stacktrace() { -#ifndef NDEBUG - Stacktrace stacktrace; - - int size = std::min(stacktrace_size_, stacktrace.size()); - for (int i = 0; i < size; i++) { - message_.append(fmt::format("\n at {} ({})", stacktrace[i].function, - stacktrace[i].location)); +class BasicException : public std::exception +{ +public: + BasicException(const std::string &message) noexcept : message_(message) + { + Stacktrace stacktrace; + message_.append(stacktrace.dump()); } -#endif - } + + template + BasicException(const std::string &format, Args &&... args) noexcept + : BasicException(fmt::format(format, std::forward(args)...)) + { + } + + const char *what() const noexcept override { return message_.c_str(); } + +private: + std::string message_; }; diff --git a/include/utils/signals/handler.hpp b/include/utils/signals/handler.hpp index 18d833870..c9328b28d 100644 --- a/include/utils/signals/handler.hpp +++ b/include/utils/signals/handler.hpp @@ -8,34 +8,40 @@ using Function = std::function; -enum class Signal : int { - Terminate = SIGTERM, - SegmentationFault = SIGSEGV, - Interupt = SIGINT, - Quit = SIGQUIT, - Abort = SIGABRT +// TODO: align bits so signals can be combined +// Signal::Terminate | Signal::Interupt +enum class Signal : int +{ + Terminate = SIGTERM, + SegmentationFault = SIGSEGV, + Interupt = SIGINT, + Quit = SIGQUIT, + Abort = SIGABRT, + BusError = SIGBUS, }; -class SignalHandler { - private: - static std::map> handlers_; +class SignalHandler +{ +private: + static std::map> handlers_; - static void handle(int signal) { handlers_[signal](); } + static void handle(int signal) { handlers_[signal](); } - public: - static void register_handler(Signal signal, Function func) { - int signal_number = static_cast(signal); - handlers_[signal_number] = func; - std::signal(signal_number, SignalHandler::handle); - } - - // TODO possible changes if signelton needed later - /* - static SignalHandler& instance() { - static SignalHandler instance; - return instance; +public: + static void register_handler(Signal signal, Function func) + { + int signal_number = static_cast(signal); + handlers_[signal_number] = func; + std::signal(signal_number, SignalHandler::handle); } - */ + + // TODO possible changes if signelton needed later + /* + static SignalHandler& instance() { + static SignalHandler instance; + return instance; + } + */ }; std::map> SignalHandler::handlers_ = {}; diff --git a/include/utils/stacktrace/log.hpp b/include/utils/stacktrace/log.hpp new file mode 
100644 index 000000000..31f273f4f --- /dev/null +++ b/include/utils/stacktrace/log.hpp @@ -0,0 +1,11 @@ +#pragma once + +#include "logging/default.hpp" +#include "utils/stacktrace/stacktrace.hpp" + +void log_stacktrace(const std::string& title) +{ + Stacktrace stacktrace; + logging::info(title); + logging::info(stacktrace.dump()); +} diff --git a/include/utils/stacktrace.hpp b/include/utils/stacktrace/stacktrace.hpp similarity index 52% rename from include/utils/stacktrace.hpp rename to include/utils/stacktrace/stacktrace.hpp index ce063438a..e33050def 100644 --- a/include/utils/stacktrace.hpp +++ b/include/utils/stacktrace/stacktrace.hpp @@ -1,10 +1,10 @@ #pragma once #include -#include #include - #include +#include + #include "utils/auto_scope.hpp" class Stacktrace @@ -13,11 +13,13 @@ public: class Line { public: - Line(const std::string& original) : original(original) {} + Line(const std::string &original) : original(original) {} - Line(const std::string& original, const std::string& function, - const std::string& location) - : original(original), function(function), location(location) {} + Line(const std::string &original, const std::string &function, + const std::string &location) + : original(original), function(function), location(location) + { + } std::string original, function, location; }; @@ -26,17 +28,17 @@ public: Stacktrace() { - void* addresses[stacktrace_depth]; + void *addresses[stacktrace_depth]; auto depth = backtrace(addresses, stacktrace_depth); // will this leak if backtrace_symbols throws? - char** symbols = nullptr; + char **symbols = nullptr; Auto(free(symbols)); symbols = backtrace_symbols(addresses, depth); // skip the first one since it will be Stacktrace::Stacktrace() - for(int i = 1; i < depth; ++i) + for (int i = 1; i < depth; ++i) lines.emplace_back(format(symbols[i])); } @@ -48,54 +50,53 @@ public: auto end() const { return lines.end(); } auto cend() const { return lines.cend(); } - const Line& operator[](size_t idx) const - { - return lines[idx]; - } + const Line &operator[](size_t idx) const { return lines[idx]; } - size_t size() const - { - return lines.size(); - } + size_t size() const { return lines.size(); } template - void dump(Stream& stream) { - stream << dump(); + void dump(Stream &stream) + { + stream << dump(); } - - std::string dump() { - std::string message; - for (int i = 0; i < size(); i++) { - message.append(fmt::format("at {} ({}) \n", lines[i].function, - lines[i].location)); - } - return message; + + std::string dump() + { + std::string message; + for (size_t i = 0; i < size(); i++) + { + message.append(fmt::format("at {} ({}) \n", lines[i].function, + lines[i].location)); + } + return message; } private: std::vector lines; - Line format(const std::string& original) + Line format(const std::string &original) { using namespace abi; auto line = original; auto begin = line.find('('); - auto end = line.find('+'); + auto end = line.find('+'); - if(begin == std::string::npos || end == std::string::npos) + if (begin == std::string::npos || end == std::string::npos) return {original}; line[end] = '\0'; int s; - auto demangled = __cxa_demangle(line.data() + begin + 1, nullptr, - nullptr, &s); + auto demangled = + __cxa_demangle(line.data() + begin + 1, nullptr, nullptr, &s); auto location = line.substr(0, begin); - auto function = demangled ? std::string(demangled) - : fmt::format("{}()", original.substr(begin + 1, end - begin - 1)); + auto function = + demangled ? 
std::string(demangled) + : fmt::format("{}()", original.substr(begin + 1, + end - begin - 1)); return {original, function, location}; } diff --git a/include/utils/terminate_handler.hpp b/include/utils/terminate_handler.hpp index 467522e44..c24e1a27f 100644 --- a/include/utils/terminate_handler.hpp +++ b/include/utils/terminate_handler.hpp @@ -1,28 +1,34 @@ #pragma once #include "utils/auto_scope.hpp" -#include "utils/stacktrace.hpp" +#include "utils/stacktrace/stacktrace.hpp" #include #include // TODO: log to local file or remote database -void stacktrace(std::ostream& stream) noexcept { - Stacktrace stacktrace; - stacktrace.dump(stream); +void stacktrace(std::ostream &stream) noexcept +{ + Stacktrace stacktrace; + stacktrace.dump(stream); } // TODO: log to local file or remote database -void terminate_handler(std::ostream& stream) noexcept { - if (auto exc = std::current_exception()) { - try { - std::rethrow_exception(exc); - } catch (std::exception& ex) { - stream << ex.what() << std::endl << std::endl; - stacktrace(stream); +void terminate_handler(std::ostream &stream) noexcept +{ + if (auto exc = std::current_exception()) + { + try + { + std::rethrow_exception(exc); + } + catch (std::exception &ex) + { + stream << ex.what() << std::endl << std::endl; + stacktrace(stream); + } } - } - std::abort(); + std::abort(); } void terminate_handler() noexcept { terminate_handler(std::cout); } diff --git a/src/memgraph_bolt.cpp b/src/memgraph_bolt.cpp index 1c0585068..7f6498794 100644 --- a/src/memgraph_bolt.cpp +++ b/src/memgraph_bolt.cpp @@ -1,5 +1,5 @@ -#include #include +#include #include "communication/bolt/v1/server/server.hpp" #include "communication/bolt/v1/server/worker.hpp" @@ -10,78 +10,74 @@ #include "logging/streams/stdout.hpp" #include "utils/signals/handler.hpp" -#include "utils/stacktrace.hpp" #include "utils/terminate_handler.hpp" +#include "utils/stacktrace/log.hpp" -static bolt::Server* serverptr; +static bolt::Server *serverptr; Logger logger; -static constexpr const char* interface = "0.0.0.0"; -static constexpr const char* port = "7687"; +// TODO: load from configuration +static constexpr const char *interface = "0.0.0.0"; +static constexpr const char *port = "7687"; -void throw_and_stacktace(std::string message) { - Stacktrace stacktrace; - logger.info(stacktrace.dump()); -} - -int main(void) { - // TODO figure out what is the relationship between this and signals - // that are configured below - std::set_terminate(&terminate_handler); - -// logger init +int main(void) +{ +// logging init #ifdef SYNC_LOGGER - logging::init_sync(); + logging::init_sync(); #else - logging::init_async(); + logging::init_async(); #endif - logging::log->pipe(std::make_unique()); + logging::log->pipe(std::make_unique()); - // get Main logger - logger = logging::log->logger("Main"); - logger.info("{}", logging::log->type()); + // logger init + logger = logging::log->logger("Main"); + logger.info("{}", logging::log->type()); - SignalHandler::register_handler(Signal::SegmentationFault, []() { - throw_and_stacktace("SegmentationFault signal raised"); - exit(1); - }); + // unhandled exception handler + std::set_terminate(&terminate_handler); - SignalHandler::register_handler(Signal::Terminate, []() { - throw_and_stacktace("Terminate signal raised"); - exit(1); - }); + // signal handling + SignalHandler::register_handler(Signal::SegmentationFault, []() { + log_stacktrace("SegmentationFault signal raised"); + std::exit(EXIT_FAILURE); + }); + SignalHandler::register_handler(Signal::Terminate, []() { + 
log_stacktrace("Terminate signal raised"); + std::exit(EXIT_FAILURE); + }); + SignalHandler::register_handler(Signal::Abort, []() { + log_stacktrace("Abort signal raised"); + std::exit(EXIT_FAILURE); + }); - SignalHandler::register_handler(Signal::Abort, []() { - throw_and_stacktace("Abort signal raised"); - exit(1); - }); + // initialize socket + io::Socket socket; + try + { + socket = io::Socket::bind(interface, port); + } + catch (io::NetworkError e) + { + logger.error("Cannot bind to socket on {} at {}", interface, port); + logger.error("{}", e.what()); + std::exit(EXIT_FAILURE); + } + socket.set_non_blocking(); + socket.listen(1024); + logger.info("Listening on {} at {}", interface, port); - io::Socket socket; + // initialize server + bolt::Server server(std::move(socket)); + serverptr = &server; - try { - socket = io::Socket::bind(interface, port); - } catch (io::NetworkError e) { - logger.error("Cannot bind to socket on {} at {}", interface, port); - logger.error("{}", e.what()); + // server start with N threads + // TODO: N should be configurable + auto N = std::thread::hardware_concurrency(); + logger.info("Starting {} workers", N); + server.start(N); - std::exit(EXIT_FAILURE); - } - - socket.set_non_blocking(); - socket.listen(1024); - - logger.info("Listening on {} at {}", interface, port); - - bolt::Server server(std::move(socket)); - serverptr = &server; - - // TODO: N should be configurable - auto N = std::thread::hardware_concurrency(); - logger.info("Starting {} workers", N); - server.start(N); - - logger.info("Shutting down..."); - - return EXIT_SUCCESS; + logger.info("Shutting down..."); + return EXIT_SUCCESS; } diff --git a/src/storage/edge_accessor.cpp b/src/storage/edge_accessor.cpp index c8267e91b..3dbded0f7 100644 --- a/src/storage/edge_accessor.cpp +++ b/src/storage/edge_accessor.cpp @@ -2,6 +2,7 @@ #include +#include "utils/assert.hpp" #include "storage/vertex_record.hpp" #include "storage/edge_type/edge_type.hpp" @@ -10,10 +11,12 @@ void EdgeAccessor::remove() const RecordAccessor::remove(); auto from_va = from(); - assert(from_va.fill()); + auto from_va_is_full = from_va.fill(); + runtime_assert(from_va_is_full, "From Vertex Accessor is empty"); auto to_va = to(); - assert(to_va.fill()); + auto to_va_is_full = to_va.fill(); + permanent_assert(to_va_is_full, "To Vertex Accessor is empty"); from_va.update().record->data.out.remove(vlist); to_va.update().record->data.in.remove(vlist); diff --git a/tests/integration/cleaning.cpp b/tests/integration/cleaning.cpp index 7bdbb3bf8..1cbfeb45b 100644 --- a/tests/integration/cleaning.cpp +++ b/tests/integration/cleaning.cpp @@ -1,19 +1,24 @@ #include "_hardcoded_query/basic.hpp" #include "logging/default.hpp" #include "logging/streams/stdout.hpp" +#include "query/preprocesor.hpp" #include "query/strip/stripper.hpp" +#include "utils/assert.hpp" #include "utils/sysinfo/memory.hpp" -template -void run(size_t n, std::string &query, S &stripper, Q &qf) +QueryPreprocessor preprocessor; + +template +void run(size_t n, std::string &query, Q &qf) { - auto stripped = stripper.strip(query); - std::cout << "Running query [" << stripped.hash << "] for " << n << " time." 
- << std::endl; + auto stripped = preprocessor.preprocess(query); + + logging::info("Running query [{}] x {}.", stripped.hash, n); + for (int i = 0; i < n; i++) { properties_t vec = stripped.arguments; - assert(qf[stripped.hash](std::move(vec))); + permanent_assert(qf[stripped.hash](std::move(vec)), "Query failed!"); } } @@ -29,13 +34,10 @@ int main(void) logging::init_async(); logging::log->pipe(std::make_unique()); - size_t entities_number = 1000; - Db db("cleaning"); - auto query_functions = hardcode::load_basic_functions(db); - - auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL); + size_t entities_number = 1000; + auto query_functions = hardcode::load_basic_functions(db); std::string create_vertex_label = "CREATE (n:LABEL {name: \"cleaner_test\"}) RETURN n"; @@ -49,17 +51,21 @@ int main(void) // clean vertices // delete vertices a // clean vertices - run(entities_number, create_vertex_label, stripper, query_functions); - assert(db.graph.vertices.access().size() == entities_number); + run(entities_number, create_vertex_label, query_functions); + permanent_assert(db.graph.vertices.access().size() == entities_number, + "Entities number doesn't match"); clean_vertex(db); - assert(db.graph.vertices.access().size() == entities_number); + permanent_assert(db.graph.vertices.access().size() == entities_number, + "Entities number doesn't match (after cleaning)"); - run(1, delete_label_vertices, stripper, query_functions); - assert(db.graph.vertices.access().size() == entities_number); + run(1, delete_label_vertices, query_functions); + permanent_assert(db.graph.vertices.access().size() == entities_number, + "Entities number doesn't match (delete label vertices)"); clean_vertex(db); - assert(db.graph.vertices.access().size() == 0); + permanent_assert(db.graph.vertices.access().size() == 0, + "Db should be empty"); // ******************************* TEST 2 ********************************// // add vertices a @@ -68,26 +74,33 @@ int main(void) // delete vertices a // clean vertices // delete vertices all - run(entities_number, create_vertex_label, stripper, query_functions); - assert(db.graph.vertices.access().size() == entities_number); + run(entities_number, create_vertex_label, query_functions); + permanent_assert(db.graph.vertices.access().size() == entities_number, + "Entities number doesn't match"); - run(entities_number, create_vertex_other, stripper, query_functions); - assert(db.graph.vertices.access().size() == entities_number * 2); + run(entities_number, create_vertex_other, query_functions); + permanent_assert(db.graph.vertices.access().size() == entities_number * 2, + "Entities number doesn't match"); clean_vertex(db); - assert(db.graph.vertices.access().size() == entities_number * 2); + permanent_assert(db.graph.vertices.access().size() == entities_number * 2, + "Entities number doesn't match"); - run(1, delete_label_vertices, stripper, query_functions); - assert(db.graph.vertices.access().size() == entities_number * 2); + run(1, delete_label_vertices, query_functions); + permanent_assert(db.graph.vertices.access().size() == entities_number * 2, + "Entities number doesn't match"); clean_vertex(db); - assert(db.graph.vertices.access().size() == entities_number); + permanent_assert(db.graph.vertices.access().size() == entities_number, + "Entities number doesn't match"); - run(1, delete_all_vertices, stripper, query_functions); - assert(db.graph.vertices.access().size() == entities_number); + run(1, delete_all_vertices, query_functions); + 
permanent_assert(db.graph.vertices.access().size() == entities_number, + "Entities number doesn't match"); clean_vertex(db); - assert(db.graph.vertices.access().size() == 0); + permanent_assert(db.graph.vertices.access().size() == 0, + "Db should be empty"); // TODO: more tests diff --git a/tests/integration/index.cpp b/tests/integration/index.cpp index 6afc345b6..762134d58 100644 --- a/tests/integration/index.cpp +++ b/tests/integration/index.cpp @@ -3,10 +3,16 @@ #include "_hardcoded_query/basic.hpp" #include "logging/default.hpp" #include "logging/streams/stdout.hpp" +#include "query/preprocesor.hpp" #include "query/strip/stripper.hpp" #include "storage/indexes/indexes.hpp" +#include "utils/assert.hpp" +#include "utils/signals/handler.hpp" +#include "utils/stacktrace/log.hpp" #include "utils/sysinfo/memory.hpp" +QueryPreprocessor preprocessor; + // Returns uniform random size_t generator from range [0,n> auto rand_gen(size_t n) { @@ -17,44 +23,43 @@ auto rand_gen(size_t n) void run(size_t n, std::string &query, Db &db) { - auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL); + auto qf = hardcode::load_basic_functions(db); + auto stripped = preprocessor.preprocess(query); - auto qf = hardcode::load_basic_functions(db); + logging::info("Running query [{}] x {}.", stripped.hash, n); - auto stripped = stripper.strip(query); - std::cout << "Running query [" << stripped.hash << "] for " << n << " time." - << std::endl; - for (int i = 0; i < n; i++) { + for (int i = 0; i < n; i++) + { properties_t vec = stripped.arguments; - assert(qf[stripped.hash](std::move(vec))); + auto commited = qf[stripped.hash](std::move(vec)); + permanent_assert(commited, "Query execution failed"); } } void add_edge(size_t n, Db &db) { - auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL); - auto qf = hardcode::load_basic_functions(db); - std::string query = "MATCH (n1), (n2) WHERE ID(n1)=0 AND " "ID(n2)=1 CREATE (n1)<-[r:IS {age: " "25,weight: 70}]-(n2) RETURN r"; + auto stripped = preprocessor.preprocess(query); - auto stripped = stripper.strip(query); - std::cout << "Running query [" << stripped.hash << "] for " << n - << " time to add edge." 
<< std::endl; + logging::info("Running query [{}] (add edge) x {}", stripped.hash, n); std::vector vertices; - for (auto &v : db.graph.vertices.access()) { + for (auto &v : db.graph.vertices.access()) + { vertices.push_back(v.second.id); } + permanent_assert(vertices.size() > 0, "Vertices size is zero"); auto rand = rand_gen(vertices.size()); - for (int i = 0; i < n; i++) { + for (int i = 0; i < n; i++) + { properties_t vec = stripped.arguments; - vec[0] = Property(Int64(vertices[rand()]), Flags::Int64); - vec[1] = Property(Int64(vertices[rand()]), Flags::Int64); - assert(qf[stripped.hash](std::move(vec))); + vec[0] = Property(Int64(vertices[rand()]), Flags::Int64); + vec[1] = Property(Int64(vertices[rand()]), Flags::Int64); + permanent_assert(qf[stripped.hash](std::move(vec)), "Add edge failed"); } } @@ -64,7 +69,7 @@ void add_property(Db &db, StoredProperty &prop) t.vertex_access().fill().update().for_all([&](auto va) { va.set(prop); }); - assert(t.commit()); + permanent_assert(t.commit(), "Add property failed"); } void add_vertex_property_serial_int(Db &db, PropertyFamily &f) @@ -79,7 +84,7 @@ void add_vertex_property_serial_int(Db &db, PropertyFamily &f) i++; }); - assert(t.commit()); + permanent_assert(t.commit(), "Add vertex property serial int failed"); } void add_edge_property_serial_int(Db &db, PropertyFamily &f) @@ -94,7 +99,7 @@ void add_edge_property_serial_int(Db &db, PropertyFamily &f) i++; }); - assert(t.commit()); + permanent_assert(t.commit(), "Add Edge property serial int failed"); } template @@ -103,8 +108,9 @@ size_t size(Db &db, IndexHolder &h) DbAccessor t(db); size_t count = 0; - auto oin = h.get_read(); - if (oin.is_present()) { + auto oin = h.get_read(); + if (oin.is_present()) + { oin.get()->for_range(t).for_all([&](auto va) mutable { count++; }); } @@ -115,8 +121,10 @@ size_t size(Db &db, IndexHolder &h) void assert_empty(Db &db) { - assert(db.graph.vertices.access().size() == 0); - assert(db.graph.edges.access().size() == 0); + permanent_assert(db.graph.vertices.access().size() == 0, + "DB isn't empty (vertices)"); + permanent_assert(db.graph.edges.access().size() == 0, + "DB isn't empty (edges)"); } void clean_vertex(Db &db) @@ -136,7 +144,7 @@ void clean_edge(Db &db) void clear_database(Db &db) { std::string delete_all_vertices = "MATCH (n) DELETE n"; - std::string delete_all_edges = "MATCH ()-[r]-() DELETE r"; + std::string delete_all_edges = "MATCH ()-[r]-() DELETE r"; run(1, delete_all_edges, db); run(1, delete_all_vertices, db); @@ -151,14 +159,16 @@ bool equal(Db &a, Db &b) auto acc_a = a.graph.vertices.access(); auto acc_b = b.graph.vertices.access(); - if (acc_a.size() != acc_b.size()) { + if (acc_a.size() != acc_b.size()) + { return false; } auto it_a = acc_a.begin(); auto it_b = acc_b.begin(); - for (auto i = acc_a.size(); i > 0; i--) { + for (auto i = acc_a.size(); i > 0; i--) + { // TODO: compare } } @@ -167,14 +177,16 @@ bool equal(Db &a, Db &b) auto acc_a = a.graph.edges.access(); auto acc_b = b.graph.edges.access(); - if (acc_a.size() != acc_b.size()) { + if (acc_a.size() != acc_b.size()) + { return false; } auto it_a = acc_a.begin(); auto it_b = acc_b.begin(); - for (auto i = acc_a.size(); i > 0; i--) { + for (auto i = acc_a.size(); i > 0; i--) + { // TODO: compare } } @@ -187,6 +199,16 @@ int main(void) logging::init_async(); logging::log->pipe(std::make_unique()); + SignalHandler::register_handler(Signal::SegmentationFault, []() { + log_stacktrace("SegmentationFault signal raised"); + std::exit(EXIT_FAILURE); + }); + + 
SignalHandler::register_handler(Signal::BusError, []() { + log_stacktrace("Bus error signal raised"); + std::exit(EXIT_FAILURE); + }); + size_t cvl_n = 1; std::string create_vertex_label = @@ -194,7 +216,7 @@ int main(void) std::string create_vertex_other = "CREATE (n:OTHER {name: \"cleaner_test\"}) RETURN n"; std::string delete_label_vertices = "MATCH (n:LABEL) DELETE n"; - std::string delete_all_vertices = "MATCH (n) DELETE n"; + std::string delete_all_vertices = "MATCH (n) DELETE n"; IndexDefinition vertex_property_nonunique_unordered = { IndexLocation{VertexSide, Option("prop"), @@ -215,15 +237,19 @@ int main(void) // ******************************* TEST 1 ********************************// { - std::cout << "TEST1" << std::endl; + logging::info("TEST 1"); // add indexes // add vertices LABEL // add edges // add vertices property // assert index size. Db db("index", false); - assert(db.indexes().add_index(vertex_property_nonunique_unordered)); - assert(db.indexes().add_index(edge_property_nonunique_unordered)); + permanent_assert( + db.indexes().add_index(vertex_property_nonunique_unordered), + "Add vertex index failed"); + permanent_assert( + db.indexes().add_index(edge_property_nonunique_unordered), + "Add edge index failed"); run(cvl_n, create_vertex_label, db); auto sp = StoredProperty( @@ -232,18 +258,21 @@ int main(void) .family_key()); add_property(db, sp); - assert(cvl_n == - size(db, db.graph.vertices.property_family_find_or_create("prop") - .index)); + permanent_assert( + cvl_n == size(db, db.graph.vertices + .property_family_find_or_create("prop") + .index), + "Create vertex property failed"); add_edge(cvl_n, db); add_edge_property_serial_int( db, db.graph.edges.property_family_find_or_create("prop")); - assert( + permanent_assert( cvl_n == - size(db, - db.graph.edges.property_family_find_or_create("prop").index)); + size(db, db.graph.edges.property_family_find_or_create("prop") + .index), + "Create edge property failed"); } // TODO: more tests diff --git a/tests/integration/snapshot.cpp b/tests/integration/snapshot.cpp index 6c8309e44..2c2157c87 100644 --- a/tests/integration/snapshot.cpp +++ b/tests/integration/snapshot.cpp @@ -1,12 +1,18 @@ #include +#include "_hardcoded_query/basic.hpp" #include "logging/default.hpp" #include "logging/streams/stdout.hpp" -#include "_hardcoded_query/basic.hpp" +#include "query/preprocesor.hpp" #include "query/strip/stripper.hpp" #include "storage/indexes/indexes.hpp" +#include "utils/assert.hpp" +#include "utils/signals/handler.hpp" +#include "utils/stacktrace/log.hpp" #include "utils/sysinfo/memory.hpp" +QueryPreprocessor preprocessor; + // Returns uniform random size_t generator from range [0,n> auto rand_gen(size_t n) { @@ -17,32 +23,28 @@ auto rand_gen(size_t n) void run(size_t n, std::string &query, Db &db) { - auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL); + auto qf = hardcode::load_basic_functions(db); + auto stripped = preprocessor.preprocess(query); - auto qf = hardcode::load_basic_functions(db); + logging::info("Running query {} [{}] x {}.", query, stripped.hash, n); - auto stripped = stripper.strip(query); - std::cout << "Running query [" << stripped.hash << "] for " << n << " time." 
- << std::endl; for (int i = 0; i < n; i++) { properties_t vec = stripped.arguments; - assert(qf[stripped.hash](std::move(vec))); + permanent_assert(qf[stripped.hash](std::move(vec)), "Query aborted"); } } void add_edge(size_t n, Db &db) { - auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL); - auto qf = hardcode::load_basic_functions(db); + auto qf = hardcode::load_basic_functions(db); std::string query = "MATCH (n1), (n2) WHERE ID(n1)=0 AND " "ID(n2)=1 CREATE (n1)<-[r:IS {age: " "25,weight: 70}]-(n2) RETURN r"; + auto stripped = preprocessor.preprocess(query); - auto stripped = stripper.strip(query); - std::cout << "Running query [" << stripped.hash << "] for " << n - << " time to add edge." << std::endl; + logging::info("Running query {} [{}] x {}.", query, stripped.hash, n); std::vector vertices; for (auto &v : db.graph.vertices.access()) @@ -56,7 +58,7 @@ void add_edge(size_t n, Db &db) properties_t vec = stripped.arguments; vec[0] = Property(Int64(vertices[rand()]), Flags::Int64); vec[1] = Property(Int64(vertices[rand()]), Flags::Int64); - assert(qf[stripped.hash](std::move(vec))); + permanent_assert(qf[stripped.hash](std::move(vec)), "Query aborted"); } } @@ -66,7 +68,8 @@ void add_property(Db &db, StoredProperty &prop) t.vertex_access().fill().for_all([&](auto va) { va.set(prop); }); - assert(t.commit()); + permanent_assert(t.commit(), "add property query aborted"); + ; } void add_property_different_int(Db &db, PropertyFamily &f) @@ -81,7 +84,7 @@ void add_property_different_int(Db &db, PropertyFamily &f) i++; }); - assert(t.commit()); + permanent_assert(t.commit(), "add property different int aborted"); } size_t size(Db &db, IndexHolder &h) @@ -102,8 +105,8 @@ size_t size(Db &db, IndexHolder &h) void assert_empty(Db &db) { - assert(db.graph.vertices.access().size() == 0); - assert(db.graph.edges.access().size() == 0); + permanent_assert(db.graph.vertices.access().size() == 0, "Db isn't empty"); + permanent_assert(db.graph.edges.access().size() == 0, "Db isn't empty"); } void clean_vertex(Db &db) @@ -178,6 +181,11 @@ int main(void) logging::init_async(); logging::log->pipe(std::make_unique()); + SignalHandler::register_handler(Signal::SegmentationFault, []() { + log_stacktrace("SegmentationFault signal raised"); + std::exit(EXIT_FAILURE); + }); + size_t cvl_n = 1000; std::string create_vertex_label = @@ -187,9 +195,8 @@ int main(void) std::string delete_label_vertices = "MATCH (n:LABEL) DELETE n"; std::string delete_all_vertices = "MATCH (n) DELETE n"; - // ******************************* TEST 1 ********************************// { - std::cout << "TEST1" << std::endl; + logging::info("TEST 1"); // make snapshot of empty db // add vertexs // add edges @@ -203,11 +210,11 @@ int main(void) clear_database(db); db.snap_engine.import(); assert_empty(db); + logging::info("END of TEST 1"); } - // ******************************* TEST 2 ********************************// { - std::cout << "TEST2" << std::endl; + logging::info("TEST 2"); // add vertexs // add edges // make snapshot of db @@ -223,13 +230,12 @@ int main(void) db.snap_engine.import(); { Db db2("snapshot"); - assert(equal(db, db2)); + permanent_assert(equal(db, db2), "Dbs aren't equal"); } } - // ******************************* TEST 3 ********************************// { - std::cout << "TEST3" << std::endl; + logging::info("TEST 3"); // add vertexs // add edges // make snapshot of db @@ -240,13 +246,12 @@ int main(void) db.snap_engine.make_snapshot(); { Db db2("not_snapshot"); - assert(!equal(db, db2)); + 
permanent_assert(!equal(db, db2), "Dbs are equal"); } } - // ******************************* TEST 4 ********************************// { - std::cout << "TEST4" << std::endl; + logging::info("TEST 4"); // add vertices LABEL // add properties // add vertices LABEL @@ -265,14 +270,17 @@ int main(void) IndexLocation{VertexSide, Option("prop"), Option(), Option()}, IndexType{false, None}}; - assert(db.indexes().add_index(idef)); - assert(cvl_n == size(db, family.index)); + permanent_assert(db.indexes().add_index(idef), "Index isn't added"); + permanent_assert(cvl_n == size(db, family.index), + "Index size isn't valid"); db.snap_engine.make_snapshot(); { Db db2("snapshot"); - assert(cvl_n == size(db, db2.graph.vertices - .property_family_find_or_create("prop") - .index)); + permanent_assert( + cvl_n == size(db, db2.graph.vertices + .property_family_find_or_create("prop") + .index), + "Index size isn't valid"); } } diff --git a/tests/unit/signal_handler.cpp b/tests/unit/signal_handler.cpp index 12ee95e5a..d61e50e09 100644 --- a/tests/unit/signal_handler.cpp +++ b/tests/unit/signal_handler.cpp @@ -6,14 +6,15 @@ #include #include "utils/signals/handler.hpp" -#include "utils/stacktrace.hpp" +#include "utils/stacktrace/stacktrace.hpp" -TEST_CASE("SignalHandler Segmentation Fault Test") { - SignalHandler::register_handler(Signal::SegmentationFault, []() { - std::cout << "Segmentation Fault" << std::endl; - Stacktrace stacktrace; - std::cout << stacktrace.dump() << std::endl; - }); +TEST_CASE("SignalHandler Segmentation Fault Test") +{ + SignalHandler::register_handler(Signal::SegmentationFault, []() { + std::cout << "Segmentation Fault" << std::endl; + Stacktrace stacktrace; + std::cout << stacktrace.dump() << std::endl; + }); - std::raise(SIGSEGV); + std::raise(SIGSEGV); } From 96406615199bf7fd71a4f978091e718ac388d386 Mon Sep 17 00:00:00 2001 From: Marko Budiselic Date: Tue, 20 Dec 2016 15:49:07 +0100 Subject: [PATCH 17/26] Doxygen setup Summary: Doxygen setup Test Plan: run doxygen Doxyfile Reviewers: sale Subscribers: buda, sale Differential Revision: https://memgraph.phacility.com/D22 --- Doxyfile | 2436 +++++++++++++++++++++++++++++++++++++++++++++++ Doxylogo.png | Bin 0 -> 6742 bytes docs/.gitignore | 2 + docs/README.md | 7 + docs/index.md | 1 - 5 files changed, 2445 insertions(+), 1 deletion(-) create mode 100644 Doxyfile create mode 100644 Doxylogo.png create mode 100644 docs/.gitignore create mode 100644 docs/README.md delete mode 100644 docs/index.md diff --git a/Doxyfile b/Doxyfile new file mode 100644 index 000000000..77d57872c --- /dev/null +++ b/Doxyfile @@ -0,0 +1,2436 @@ +# Doxyfile 1.8.11 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. 
The default is UTF-8 which is also the encoding used for all text +# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv +# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv +# for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = "The World's Most Powerful Graph Database" + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = Doxylogo.png + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = docs + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. 
+ +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. 
+ +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines. + +ALIASES = + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +TCL_SUBST = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. 
+# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, Javascript, +# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: +# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: +# Fortran. In the later case the parser tries to guess whether the code is fixed +# or free formatted code, this is the default for Fortran type files), VHDL. For +# instance to make doxygen treat .inc files as Fortran files (default is PHP), +# and .f files as C (default is Fortran), use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See http://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. 
If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. 
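+# As a worked example of the formula above: LOOKUP_CACHE_SIZE = 2 would give a
+# cache of 2^(16+2) = 2^18 = 262144 symbols, four times the default
+# 2^16 = 65536.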
+ +LOOKUP_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# (class|struct|union) declarations. If set to NO, these declarations will be +# included in the documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. 
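+# A minimal sketch of what this controls (the function below is hypothetical):
+# with the default NO, an in-body block such as
+#
+#   void clean(Db &db) {
+#     /*! This sentence is appended to clean()'s detailed description. */
+#   }
+#
+# keeps that sentence in the generated docs; setting YES drops it.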
+ +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES, upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. + +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. 
If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. 
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of the
+# FILE_VERSION_FILTER tag, and <input-file> is the name of an input file provided
+# by doxygen. Whatever the program writes to standard output is used as the file
+# version. For an example see the documentation.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info how to create references.
+
+CITE_BIB_FILES =
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO, doxygen will only warn about wrong or incomplete
+# parameter documentation, but not about the absence of documentation.
+# The default value is: NO.
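+# A short sketch (hypothetical signature): with this option set to YES, a
+# declaration documented only as
+#
+#   /// Connects two vertices.
+#   bool add_edge(int from, int to);
+#
+# is warned about, since neither parameter nor the return value is documented;
+# with the default NO only wrong or incomplete parameter docs are reported.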
+ +WARN_NO_PARAMDOC = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, +# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f, *.for, *.tcl, +# *.vhd, *.vhdl, *.ucf, *.qsf, *.as and *.js. + +FILE_PATTERNS = + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. 
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS = */.git/*
+EXCLUDE_PATTERNS += */build/*
+EXCLUDE_PATTERNS += */cmake/*
+EXCLUDE_PATTERNS += */config/*
+EXCLUDE_PATTERNS += */docker/*
+EXCLUDE_PATTERNS += */docs/*
+EXCLUDE_PATTERNS += */libs/*
+EXCLUDE_PATTERNS += */release/*
+EXCLUDE_PATTERNS += */Testing/*
+EXCLUDE_PATTERNS += */tests/*
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+#   <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = NO + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# function all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = NO + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = NO + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. 
+ +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see http://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the +# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the +# cost of reduced performance. This can be particularly helpful with template +# rich C++ code for which doxygen's built-in parser lacks the necessary type +# information. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse-libclang=ON option for CMake. +# The default value is: NO. + +CLANG_ASSISTED_PARSING = NO + +# If clang assisted parsing is enabled you can provide the compiler with command +# line options that you would normally use when invoking the compiler. Note that +# the include paths will already be set by doxygen for the files and directories +# specified with INPUT and INCLUDE_PATH. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. + +CLANG_OPTIONS = + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. 
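+# For example (hypothetical class names): if most classes were called MgVertex,
+# MgEdge, and so on, setting IGNORE_PREFIX = Mg would index them under V and E
+# rather than all under M.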
+ +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. 
+# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 28 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 250 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = NO + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 
0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: http://developer.apple.com/tools/xcode/), introduced with +# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. 
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION =
+
+# The GENERATE_CHI flag controls whether a separate .chi index file is generated
+# (YES) or included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI = NO
+
+# The CHM_INDEX_ENCODING tag is used to encode HtmlHelp index (hhk), content
+# (hhc) and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore, it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the virtual folder to use when generating
+# Qt Help Project output. For more information please see Qt Help Project /
+# Virtual Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
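+# For example, a name/version attribute pair might look like (illustrative
+# values only):
+#   QHP_CUST_FILTER_ATTRS = myproject 1.0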
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. For more information please see Qt Help
+# Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty, doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated that, together with the HTML files, form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files need
+# to be copied into the plugins directory of Eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying, Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at the
+# top of each HTML page. A value of NO enables the index and the value YES
+# disables it. Since the tabs in the index contain the same information as the
+# navigation tree, you can set this option to YES if you also set
+# GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work, a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
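+# For example (illustrative): ENUM_VALUES_PER_LINE = 1 would list each enum
+# value on a line of its own.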
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH = 250
+
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run, you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes take effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client-side JavaScript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output.
+# When enabled, you may also need to install MathJax separately and configure
+# the path to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# This tag requires that the tag USE_MATHJAX is set to YES.
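+# For example, a fully local MathJax setup would combine (illustrative values,
+# not this project's setting):
+#   USE_MATHJAX = YES
+#   MATHJAX_RELPATH = ../mathjax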
+
+MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example:
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with JavaScript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses JavaScript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function, so this one should typically be disabled.
+# For large projects the JavaScript-based search engine can be slow; in that
+# case enabling SERVER_BASED_SEARCH may provide a better solution. It is
+# possible to search using the keyboard; to jump to the search box use
+# <access key> + S (what the <access key> is depends on the OS and browser, but
+# it is typically <CTRL>, <ALT>/<option> or both).