This commit is contained in:
sale 2016-12-19 10:05:38 +00:00
commit 021da26ac9
32 changed files with 941 additions and 504 deletions

View File

@ -1,82 +1,55 @@
# MemGraph CMake configuration
cmake_minimum_required(VERSION 3.1)
# get directory name
get_filename_component(ProjectId ${CMAKE_SOURCE_DIR} NAME)
# replace whitespaces with underscores
string(REPLACE " " "_" ProjectId ${ProjectId})
# !! IMPORTANT !! run ./project_root/init.sh before cmake command
# to download dependencies
# choose a compiler
# NOTE: must be chosen before any use of project() or enable_language()
if (UNIX)
set(CMAKE_C_COMPILER "clang")
set(CMAKE_CXX_COMPILER "clang++")
endif (UNIX)
# -----------------------------------------------------------------------------
# set project name
project(${ProjectId})
# get directory name
get_filename_component(project_name ${CMAKE_SOURCE_DIR} NAME)
# replace whitespaces with underscores
string(REPLACE " " "_" project_name ${project_name})
# set project name
project(${project_name})
# -----------------------------------------------------------------------------
# setup CMake module path, defines path for include() and find_package()
# https://cmake.org/cmake/help/latest/variable/CMAKE_MODULE_PATH.html
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR}/cmake)
# -----------------------------------------------------------------------------
# custom function definitions
include(functions)
# -----------------------------------------------------------------------------
# threading
find_package(Threads REQUIRED)
# flags
# -----------------------------------------------------------------------------
# c++14
set(cxx_standard 14)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++1y")
# -----------------------------------------------------------------------------
# functions
# prints all included directories
function(list_includes)
    # Read the directory-scoped INCLUDE_DIRECTORIES property, which holds
    # everything accumulated via include_directories() up to this point.
    get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
                 PROPERTY INCLUDE_DIRECTORIES)
    foreach(dir ${dirs})
        message(STATUS "dir='${dir}'")
    endforeach()
endfunction()
# get file names from list of file paths
function(get_file_names file_paths file_names)
    # `file_names` holds the NAME of the caller's output variable.
    # Accumulate into a local so we don't clobber that name before use
    # (the original did `set(file_names "")`, destroying the argument).
    set(names "")
    foreach(file_path ${file_paths})
        # NAME_WE strips both the directory part and the extension.
        get_filename_component(file_name ${file_path} NAME_WE)
        list(APPEND names ${file_name})
    endforeach()
    # Write into the variable named by the caller. The original wrote the
    # literal variable `file_names`, which only worked when the caller
    # happened to pass that exact name as the output argument.
    set(${file_names} "${names}" PARENT_SCOPE)
endfunction()
# SUBDIRLIST(result curdir)
# Stores in ${result} the names of all immediate subdirectories of curdir.
# Implemented as a function (returning via PARENT_SCOPE) rather than a macro
# so the temporaries `children` and `dirlist` don't leak into the caller's
# scope; command names lowercased per modern CMake style. Call sites are
# unchanged — CMake command names are case-insensitive.
function(SUBDIRLIST result curdir)
    file(GLOB children RELATIVE ${curdir} ${curdir}/*)
    set(dirlist "")
    foreach(child ${children})
        if(IS_DIRECTORY ${curdir}/${child})
            list(APPEND dirlist ${child})
        endif()
    endforeach()
    set(${result} "${dirlist}" PARENT_SCOPE)
endfunction()
# custom targets
# move test data data to the build directory
if (UNIX)
set(test_data "tests/data")
set(test_data_src "${CMAKE_SOURCE_DIR}/${test_data}")
set(test_data_dst "${CMAKE_BINARY_DIR}/${test_data}")
add_custom_target (test_data
COMMAND rm -rf ${test_data_dst}
COMMAND cp -r ${test_data_src} ${test_data_dst}
)
endif (UNIX)
# external dependencies
# dir variables
set(src_dir ${CMAKE_SOURCE_DIR}/src)
set(libs_dir ${CMAKE_SOURCE_DIR}/libs)
set(include_dir ${CMAKE_SOURCE_DIR}/include)
set(build_include_dir ${CMAKE_BINARY_DIR}/include)
set(test_include_dir ${CMAKE_BINARY_DIR}/tests/include)
set(test_src_dir ${CMAKE_BINARY_DIR}/tests/src)
# -----------------------------------------------------------------------------
# setup external dependencies
# !! IMPORTANT !! run ./libs/setup.sh before cmake command
# TODO: run from execute_process
# lemon & lempar
set(lemon_dir ${libs_dir}/lemon)
# lexertl
@ -90,14 +63,17 @@ set(yaml_include_dir ${yaml_source_dir}/include)
set(yaml_static_lib ${yaml_source_dir}/libyaml-cpp.a)
# Catch (C++ Automated Test Cases in Headers)
set(catch_source_dir "${libs_dir}/Catch")
# -----------------------------------------------------------------------------
# load cmake modules: cmake/*.cmake
include(gtest)
include(gbenchmark)
# -----------------------------------------------------------------------------
# build memgraph's cypher grammar
# copy grammar file to the build directory
FILE(COPY ${include_dir}/query/language/cypher/cypher.y DESTINATION ${CMAKE_BINARY_DIR})
FILE(COPY ${include_dir}/query/language/cypher/cypher.y
DESTINATION ${CMAKE_BINARY_DIR})
# build cypher parser (only c file - cypher.c)
EXECUTE_PROCESS(
COMMAND ${lemon_dir}/lemon ${CMAKE_BINARY_DIR}/cypher.y -s
@ -109,34 +85,22 @@ FILE(RENAME ${CMAKE_BINARY_DIR}/cypher.c ${CMAKE_BINARY_DIR}/cypher.cpp)
SET(cypher_build_include_dir ${build_include_dir}/cypher)
FILE(MAKE_DIRECTORY ${cypher_build_include_dir})
FILE(RENAME ${CMAKE_BINARY_DIR}/cypher.h ${cypher_build_include_dir}/cypher.h)
# -----------------------------------------------------------------------------
# prepare template and destination folders for query engine (tests)
# and memgraph server binary
# copy query_engine's templates file
FILE(COPY ${src_dir}/query_engine/template DESTINATION ${CMAKE_BINARY_DIR}/tests)
FILE(COPY ${src_dir}/query_engine/template
DESTINATION ${CMAKE_BINARY_DIR}/tests)
FILE(COPY ${src_dir}/query_engine/template DESTINATION ${CMAKE_BINARY_DIR})
# create destination folder for compiled queries
FILE(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/tests/compiled/cpu)
FILE(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/compiled/cpu)
# -----------------------------------------------------------------------------
# TODO: filter header files, all files don't need to be copied
# they are all copied because query engine needs header files during
# query compilation
# TODO: make a function (REMOVE copy pasted part)
# SUBDIRLIST(source_folders ${src_dir})
# foreach(source_folder ${source_folders})
# file(COPY ${src_dir}/${source_folder} DESTINATION ${build_include_dir})
# endforeach()
SUBDIRLIST(source_folders ${src_dir})
foreach(source_folder ${source_folders})
file(COPY ${src_dir}/${source_folder} DESTINATION ${test_src_dir})
endforeach()
SUBDIRLIST(source_folders ${include_dir})
# Copy each include/ subdirectory into the tests include directory.
# BUGFIX: the original loop variable was misspelled (`source_foler`), so the
# body's ${source_folder} expanded to a stale or empty value and the intended
# per-subdirectory copy never happened.
foreach(source_folder ${source_folders})
    file(COPY ${include_dir}/${source_folder} DESTINATION ${test_include_dir})
endforeach()
# copy files needed for query engine (headers)
include(copy_includes)
# -----------------------------------------------------------------------------
# linter setup (clang-tidy)
# all source files for linting
@ -146,7 +110,6 @@ FILE(GLOB_RECURSE LINTER_SRC_FILES
${CMAKE_SOURCE_DIR}/poc/.cpp
)
MESSAGE(STATUS "All cpp files for linting are: ${LINTER_SRC_FILES}")
# linter target clang-tidy
find_program(CLANG_TIDY "clang-tidy")
if(CLANG_TIDY)
@ -160,24 +123,29 @@ if(CLANG_TIDY)
-I${CMAKE_SOURCE_DIR}/include -I${fmt_source_dir} -I${yaml_include_dir}
)
endif()
# linter setup
# -----------------------------------------------------------------------------
# debug flags
# TODO: add specific flags
# release flags
set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG")
#debug flags
set(CMAKE_CXX_FLAGS_DEBUG "-g")
# compiler specific flags
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
# set(CMAKE_CXX_FLAGS_DEBUG "-Wl,--export-dynamic ${CMAKE_CXX_FLAGS_DEBUG}")
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
# set(CMAKE_CXX_FLAGS_DEBUG "-rdynamic ${CMAKE_CXX_FLAGS_DEBUG}")
endif()
# release flags
set(CMAKE_CXX_FLAGS_RELEASE "-O2 ${CMAKE_CXX_FLAGS_RELEASE}")
# default build type is debug
if ("${CMAKE_BUILD_TYPE}" STREQUAL "")
set(CMAKE_BUILD_TYPE "debug")
endif()
message(STATUS "CMake build type: ${CMAKE_BUILD_TYPE}")
# -----------------------------------------------------------------------------
#debug flags
set(CMAKE_CXX_FLAGS_DEBUG "-g2 ${CMAKE_CXX_FLAGS_DEBUG}")
# TODO: find a way to apply the defines at query compile time
# -- configure defines -- default is ON | true | enabled ----------------------
# -- logging ------------------------------------------------------------------
# logging levels
option(LOG_NO_TRACE "Disable trace logging" OFF)
message(STATUS "LOG_NO_TRACE: ${LOG_NO_TRACE}")
if (LOG_NO_TRACE)
@ -207,15 +175,20 @@ message(STATUS "LOG_NO_ERROR: ${LOG_NO_ERROR}")
if (LOG_NO_ERROR)
add_definitions(-DLOG_NO_ERROR)
endif()
# -- logging ------------------------------------------------------------------
# -- logger -------------------------------------------------------------------
option(SYNC_LOGGER "" OFF)
message(STATUS "SYNC LOGGER: ${SYNC_LOGGER}")
# TODO: find a way to apply those defines at query compile time
# -----------------------------------------------------------------------------
# logger type
# the default logger is sync logger
# on: cmake ... -DSYNC_LOGGER=OFF ... async logger is going to be used
option(SYNC_LOGGER "Sync logger" ON)
message(STATUS "SYNC_LOGGER: ${SYNC_LOGGER}")
if (SYNC_LOGGER)
add_definitions(-DSYNC_LOGGER)
endif()
# -- logger -------------------------------------------------------------------
# -- assert -------------------------------------------------------------------
# -----------------------------------------------------------------------------
# assert
option(RUNTIME_ASSERT "Enable runtime assertions" ON)
message(STATUS "RUNTIME_ASSERT: ${RUNTIME_ASSERT}")
if(RUNTIME_ASSERT)
@ -227,39 +200,52 @@ message(STATUS "THROW_EXCEPTION_ON_ERROR: ${THROW_EXCEPTION_ON_ERROR}")
if(THROW_EXCEPTION_ON_ERROR)
add_definitions(-DTHROW_EXCEPTION_ON_ERROR)
endif()
# -- assert -------------------------------------------------------------------
# -- ndebug -------------------------------------------------------------------
# -----------------------------------------------------------------------------
# ndebug
option(NDEBUG "No debug" OFF)
message(STATUS "NDEBUG: ${NDEBUG} (be careful CMAKE_BUILD_TYPE can also append this flag)")
message(STATUS "NDEBUG: ${NDEBUG} (be careful CMAKE_BUILD_TYPE can also \
append this flag)")
if(NDEBUG)
add_definitions( -DNDEBUG )
endif()
# -- ndebug -------------------------------------------------------------------
# -----------------------------------------------------------------------------
# -- GLIBCXX_DEBUG ------------------------------------------------------------
# glibcxx debug (useful for gdb)
# the problem is that the query engine doesn't work as it should work if
# this flag is present
# this flag is present (TODO: figure out why)
option(GLIBCXX_DEBUG "glibc debug" OFF)
message(STATUS "GLIBCXX_DEBUG: ${GLIBCXX_DEBUG} (solves problem with _M_dataplus member during a debugging process")
message(STATUS "GLIBCXX_DEBUG: ${GLIBCXX_DEBUG} (solves problem with \
_M_dataplus member during a debugging process)")
if(GLIBCXX_DEBUG)
set(CMAKE_CXX_FLAGS_DEBUG "-D_GLIBCXX_DEBUG ${CMAKE_CXX_FLAGS_DEBUG}")
endif()
# -----------------------------------------------------------------------------
# -- binaries -----------------------------------------------------------------
# option binaries
# memgraph
option(MEMGRAPH "Build memgraph binary" ON)
message(STATUS "MEMGRAPH binary: ${MEMGRAPH}")
# proof of concept
option(POC "Build proof of concept binaries" ON)
message(STATUS "POC binaries: ${POC}")
option(TOOLS "Build tool executables" ON)
message(STATUS "TOOLS binaries: ${TOOLS}")
option(TESTS "Build test binaries" ON)
message(STATUS "TESTS binaries: ${TESTS}")
option(BENCHMARK "Build benchmark binaries" ON)
message(STATUS "BENCHMARK binaries: ${BENCHMARK}")
# -- binaries -----------------------------------------------------------------
# -- configure defines --------------------------------------------------------
# tests
option(ALL_TESTS "Add all test binaries" ON)
message(STATUS "Add all test binaries: ${ALL_TESTS}")
option(BENCHMARK_TESTS "Add benchmark test binaries" OFF)
message(STATUS "Add benchmark test binaries: ${BENCHMARK_TESTS}")
option(CONCURRENT_TESTS "Add concurrent test binaries" OFF)
message(STATUS "Add concurrent test binaries: ${CONCURRENT_TESTS}")
option(INTEGRATION_TESTS "Add integration test binaries" OFF)
message(STATUS "Add integration test binaries: ${INTEGRATION_TESTS}")
option(MANUAL_TESTS "Add manual test binaries" OFF)
message(STATUS "Add manual test binaries: ${MANUAL_TESTS}")
option(UNIT_TESTS "Add unit test binaries" OFF)
message(STATUS "Add unit test binaries: ${UNIT_TESTS}")
# -----------------------------------------------------------------------------
# -- includes -----------------------------------------------------------------
# includes
include_directories(${CMAKE_SOURCE_DIR}/include)
include_directories(${src_dir})
include_directories(${build_include_dir})
@ -274,14 +260,17 @@ include_directories(${r3_source_dir}/include)
# creates build/libcypher_lib.a
add_library(cypher_lib STATIC ${CMAKE_BINARY_DIR}/cypher.cpp)
# -----------------------------------------------------------------------------
# REST API preprocessor
EXECUTE_PROCESS(
COMMAND python link_resources.py
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/src/api
)
# TODO: remove from here (isolate HTTP server)
# # REST API preprocessor
# EXECUTE_PROCESS(
# COMMAND python link_resources.py
# WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/src/api
# )
# # ---------------------------------------------------------------------------
# TODO: create separate static library from bolt code
# all memgraph src files
set(memgraph_src_files
${src_dir}/config/config.cpp
${src_dir}/dbms/dbms.cpp
@ -353,29 +342,29 @@ set(memgraph_src_files
${src_dir}/storage/edge_accessor.cpp
${src_dir}/storage/record_accessor.cpp
)
# -----------------------------------------------------------------------------
# STATIC library used by memgraph executables
add_library(memgraph STATIC ${memgraph_src_files})
add_library(memgraph_lib STATIC ${memgraph_src_files})
# -----------------------------------------------------------------------------
# STATIC PIC library used by query engine
add_library(memgraph_pic STATIC ${memgraph_src_files})
set_property(TARGET memgraph_pic PROPERTY POSITION_INDEPENDENT_CODE TRUE)
# tests
if (TESTS)
enable_testing()
add_subdirectory(tests)
endif()
# -----------------------------------------------------------------------------
# proof of concepts
if (POC)
add_subdirectory(poc)
endif()
# -----------------------------------------------------------------------------
# benchmark binaries
if (BENCHMARK)
add_subdirectory(${PROJECT_SOURCE_DIR}/tests/benchmark)
# tests
# Enter the tests subdirectory if any test category is requested.
# BUGFIX: the declared options are CONCURRENT_TESTS / INTEGRATION_TESTS
# (plural); the original condition tested the never-defined singular names
# CONCURRENT_TEST / INTEGRATION_TEST, so those two options could not enable
# the tests build on their own.
if (ALL_TESTS OR BENCHMARK_TESTS OR CONCURRENT_TESTS OR INTEGRATION_TESTS
    OR MANUAL_TESTS OR UNIT_TESTS)
    add_subdirectory(tests)
endif()
# -----------------------------------------------------------------------------
# memgraph build name
execute_process(
@ -395,19 +384,17 @@ string(STRIP ${COMMIT_NO} COMMIT_NO)
string(STRIP ${COMMIT_HASH} COMMIT_HASH)
set(MEMGRAPH_BUILD_NAME
"memgraph_${COMMIT_NO}_${COMMIT_HASH}_${COMMIT_BRANCH}_${CMAKE_BUILD_TYPE}")
message(STATUS "CMake build type: ${CMAKE_BUILD_TYPE}")
message(STATUS "Debug flags: ${CMAKE_CXX_FLAGS_DEBUG}")
message(STATUS "Release flags: ${CMAKE_CXX_FLAGS_RELEASE}")
# -----------------------------------------------------------------------------
# memgraph main executable
if (MEMGRAPH)
add_executable(${MEMGRAPH_BUILD_NAME} ${src_dir}/memgraph_bolt.cpp)
target_link_libraries(${MEMGRAPH_BUILD_NAME} memgraph)
target_link_libraries(${MEMGRAPH_BUILD_NAME} memgraph_lib)
target_link_libraries(${MEMGRAPH_BUILD_NAME} stdc++fs)
target_link_libraries(${MEMGRAPH_BUILD_NAME} Threads::Threads)
target_link_libraries(${MEMGRAPH_BUILD_NAME} cypher_lib)
if (UNIX)
target_link_libraries(${MEMGRAPH_BUILD_NAME} crypto)
# target_link_libraries(${MEMGRAPH_BUILD_NAME} ssl)
@ -416,3 +403,4 @@ if (MEMGRAPH)
target_link_libraries(${MEMGRAPH_BUILD_NAME} dl)
endif (UNIX)
endif()
# -----------------------------------------------------------------------------

View File

@ -132,7 +132,6 @@ FILE(COPY ${include_dir}/utils/counters/atomic_counter.hpp DESTINATION ${build_i
FILE(COPY ${include_dir}/utils/counters/simple_counter.hpp DESTINATION ${build_include_dir}/utils/counters)
FILE(COPY ${include_dir}/utils/random/fast_binomial.hpp DESTINATION ${build_include_dir}/utils/random)
FILE(COPY ${include_dir}/utils/random/xorshift128plus.hpp DESTINATION ${build_include_dir}/utils/random)
FILE(COPY ${include_dir}/utils/exceptions/basic_exception.hpp DESTINATION ${build_include_dir}/utils/exceptions)
FILE(COPY ${include_dir}/utils/datetime/timestamp.hpp DESTINATION ${build_include_dir}/utils/datetime)
FILE(COPY ${include_dir}/utils/datetime/datetime_error.hpp DESTINATION ${build_include_dir}/utils/datetime)
FILE(COPY ${include_dir}/utils/types/byte.hpp DESTINATION ${build_include_dir}/utils/types)
@ -141,6 +140,10 @@ FILE(COPY ${include_dir}/utils/option.hpp DESTINATION ${build_include_dir}/utils
FILE(COPY ${include_dir}/utils/border.hpp DESTINATION ${build_include_dir}/utils)
FILE(COPY ${include_dir}/utils/order.hpp DESTINATION ${build_include_dir}/utils)
FILE(COPY ${include_dir}/utils/numerics/saturate.hpp DESTINATION ${build_include_dir}/utils/numerics)
FILE(COPY ${include_dir}/utils/memory/stack_allocator.hpp DESTINATION ${build_include_dir}/utils/memory)
FILE(COPY ${include_dir}/utils/memory/block_allocator.hpp DESTINATION ${build_include_dir}/utils/memory)
FILE(COPY ${include_dir}/utils/exceptions/basic_exception.hpp DESTINATION ${build_include_dir}/utils/exceptions)
FILE(COPY ${include_dir}/utils/exceptions/out_of_memory.hpp DESTINATION ${build_include_dir}/utils/exceptions)
FILE(COPY ${include_dir}/utils/iterator/iterator_base.hpp DESTINATION ${build_include_dir}/utils/iterator)
FILE(COPY ${include_dir}/utils/iterator/virtual_iter.hpp DESTINATION ${build_include_dir}/utils/iterator)

29
cmake/functions.cmake Normal file
View File

@ -0,0 +1,29 @@
# prints all included directories
function(list_includes)
    # Dump the directory-scoped INCLUDE_DIRECTORIES property — everything
    # added through include_directories() so far.
    get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
                 PROPERTY INCLUDE_DIRECTORIES)
    foreach(dir ${dirs})
        message(STATUS "dir='${dir}'")
    endforeach()
endfunction()
# get file names from list of file paths
function(get_file_names file_paths file_names)
    # `file_names` is the NAME of the caller's output variable; build the
    # list in a local so the argument isn't clobbered (the original
    # `set(file_names "")` destroyed it immediately).
    set(names "")
    foreach(file_path ${file_paths})
        # NAME_WE = file name without directory or extension.
        get_filename_component(file_name ${file_path} NAME_WE)
        list(APPEND names ${file_name})
    endforeach()
    # Return through the caller-named variable; the original set the literal
    # variable `file_names`, which only worked for callers passing that name.
    set(${file_names} "${names}" PARENT_SCOPE)
endfunction()
# SUBDIRLIST(result curdir)
# Sets ${result} to the list of immediate subdirectory names under curdir.
# Converted from a macro to a function so `children` and `dirlist` stay
# local instead of leaking into the caller; result is returned with
# PARENT_SCOPE. Existing call sites work unchanged (command names are
# case-insensitive in CMake).
function(SUBDIRLIST result curdir)
    file(GLOB children RELATIVE ${curdir} ${curdir}/*)
    set(dirlist "")
    foreach(child ${children})
        if(IS_DIRECTORY ${curdir}/${child})
            list(APPEND dirlist ${child})
        endif()
    endforeach()
    set(${result} "${dirlist}" PARENT_SCOPE)
endfunction()

View File

@ -1,5 +1,7 @@
#pragma once
#include <experimental/filesystem>
#include "database/db.hpp"
#include "logging/default.hpp"
#include "query/exception/query_engine.hpp"
@ -13,6 +15,8 @@
* -> [code_compiler] -> code_executor
*/
namespace fs = std::experimental::filesystem;
// query engine has to be aware of the Stream because Stream
// is passed to the dynamic shared library
template <typename Stream>
@ -44,6 +48,17 @@ public:
}
}
// preload functionality
auto load(const uint64_t hash, const fs::path& path)
{
program_loader.load(hash, path);
}
auto load(const std::string& query)
{
program_loader.load(query);
}
protected:
Logger logger;

View File

@ -20,9 +20,6 @@ public:
std::string flags;
// TODO: sync this with cmake configuration
#ifdef BARRIER
flags += " -DBARRIER";
#endif
#ifdef NDEBUG
flags += " -DNDEBUG -O2";
#endif
@ -53,9 +50,6 @@ public:
"-I../include",
"-I../libs/fmt", // TODO: load from config
"-I../../libs/fmt", "-L./ -L../",
#ifdef BARRIER
"-lbarrier_pic",
#endif
"-lmemgraph_pic",
"-shared -fPIC" // shared library flags
);
@ -67,6 +61,8 @@ public:
// if compilation has failed throw exception
if (compile_status == -1) {
logger.debug("FAIL: Query Code Compilation: {} -> {}", in_file,
out_file);
throw PlanCompilationException(
"Code compilation error. Generated code is not compilable or "
"compilation settings are wrong");

View File

@ -3,6 +3,7 @@
#include <memory>
#include <string>
#include <unordered_map>
#include <experimental/filesystem>
#include "config/config.hpp"
#include "logging/default.hpp"
@ -16,6 +17,8 @@
#include "utils/file.hpp"
#include "utils/hashing/fnv.hpp"
namespace fs = std::experimental::filesystem;
template <typename Stream>
class ProgramLoader
{
@ -26,6 +29,16 @@ public:
ProgramLoader() : logger(logging::log->logger("PlanLoader")) {}
// TODO: decouple load(query) method
auto load(const uint64_t hash, const fs::path &path)
{
// TODO: get lib path (that same folder as path folder or from config)
// TODO: compile
// TODO: dispose the old lib
// TODO: store the compiled lib
}
auto load(const std::string &query)
{
auto preprocessed = preprocessor.preprocess(query);

View File

@ -8,8 +8,6 @@
auto VertexAccessor::out() const
{
DbTransaction &t = this->db;
std::cout << "VA OUT" << std::endl;
std::cout << record->data.out.size() << std::endl;
return iter::make_map(iter::make_iter_ref(record->data.out),
[&](auto e) -> auto { return EdgeAccessor(*e, t); });
}

View File

@ -6,4 +6,6 @@ class NotYetImplemented : public BasicException
{
public:
using BasicException::BasicException;
NotYetImplemented() : BasicException("") {}
};

View File

@ -1,45 +1,37 @@
cmake_minimum_required(VERSION 3.1)
find_package(Threads REQUIRED)
project(memgraph_poc)
# get all cpp abs file names recursively starting from current directory
file(GLOB poc_cpps *.cpp)
message(STATUS "Available poc cpp files are: ${poc_cpps}")
include_directories(${CMAKE_SOURCE_DIR}/poc)
include_directories(${CMAKE_SOURCE_DIR}/queries)
add_executable(poc_astar astar.cpp)
target_link_libraries(poc_astar memgraph)
target_link_libraries(poc_astar Threads::Threads)
target_link_libraries(poc_astar ${fmt_static_lib})
target_link_libraries(poc_astar ${yaml_static_lib})
# for each cpp file build binary
foreach(poc_cpp ${poc_cpps})
add_executable(powerlinx_profile profile.cpp)
target_link_libraries(powerlinx_profile memgraph)
target_link_libraries(powerlinx_profile Threads::Threads)
target_link_libraries(powerlinx_profile ${fmt_static_lib})
target_link_libraries(powerlinx_profile ${yaml_static_lib})
# get exec name (remove extension from the abs path)
get_filename_component(exec_name ${poc_cpp} NAME_WE)
add_executable(csv_import csv_import.cpp)
target_link_libraries(csv_import memgraph)
target_link_libraries(csv_import Threads::Threads)
target_link_libraries(csv_import ${fmt_static_lib})
target_link_libraries(csv_import ${yaml_static_lib})
# set target name in format {project_name}_{test_type}_{exec_name}
set(target_name ${project_name}_poc_${exec_name})
add_executable(add_double add_double.cpp)
target_link_libraries(add_double memgraph)
target_link_libraries(add_double Threads::Threads)
target_link_libraries(add_double ${fmt_static_lib})
target_link_libraries(add_double ${yaml_static_lib})
# build exec file
add_executable(${target_name} ${poc_cpp} isolation/header.cpp)
set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})
add_executable(astar_query astar_query.cpp)
target_link_libraries(astar_query memgraph)
target_link_libraries(astar_query Threads::Threads)
target_link_libraries(astar_query ${fmt_static_lib})
target_link_libraries(astar_query ${yaml_static_lib})
# OUTPUT_NAME sets the real name of a target when it is built and can be
# used to help create two targets of the same name even though CMake
# requires unique logical target names
set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name})
add_executable(size_aligment size_aligment.cpp)
target_link_libraries(size_aligment memgraph)
target_link_libraries(size_aligment Threads::Threads)
target_link_libraries(size_aligment ${fmt_static_lib})
target_link_libraries(size_aligment ${yaml_static_lib})
# link libraries
# threads (cross-platform)
target_link_libraries(${target_name} Threads::Threads)
# memgraph lib
target_link_libraries(${target_name} memgraph_lib)
# fmt format lib
target_link_libraries(${target_name} ${fmt_static_lib})
# yaml parser lib
target_link_libraries(${target_name} ${yaml_static_lib})
add_executable(isolation isolation.cpp isolation/header.cpp)
target_link_libraries(isolation ${fmt_static_lib})
endforeach()

View File

@ -14,9 +14,13 @@ using std::endl;
// Dressipi astar query of 4 clicks.
// TODO: push down appropriate
using Stream = std::ostream;
// TODO: figure out from the pattern in a query
constexpr size_t max_depth = 3;
// TODO: from query LIMIT 10
constexpr size_t limit = 10;
class Node
@ -79,10 +83,12 @@ void astar(DbAccessor &t, plan_args_t &args, Stream &stream)
auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; };
std::priority_queue<Node *, std::vector<Node *>, decltype(cmp)> queue(cmp);
// TODO: internal id independent
auto start_vr = t.vertex_find(Id(args[0].as<Int64>().value()));
if (!start_vr.is_present())
{
// stream.write_failure({{}});
// TODO: stream failure
return;
}
@ -96,15 +102,19 @@ void astar(DbAccessor &t, plan_args_t &args, Stream &stream)
auto now = queue.top();
queue.pop();
if (max_depth <= now->depth)
if (now->depth >= max_depth)
{
// stream.write_success_empty();
// best.push_back(now);
// TODO: stream the result
count++;
if (count >= limit)
{
// the limit was reached -> STOP the execution
break;
}
// if the limit wasn't reached -> POP the next vertex
continue;
}
@ -130,6 +140,8 @@ public:
{
DbAccessor t(db);
// TODO: find node
astar(t, args, stream);
return t.commit();

View File

@ -2,6 +2,8 @@
# Initial version of script that is going to be used for release build.
# NOTE: do not run this script as a super user
# TODO: enable options related to lib
echo "Memgraph Release Building..."
@ -33,12 +35,14 @@ mkdir -p ../release/${exe_name}
# copy all relevant files
cp ${exe_name} ../release/${exe_name}/memgraph
cp libmemgraph_pic.a ../release/${exe_name}/libmemgraph_pic.a
rm -rf ../release/${exe_name}/include
cp -r include ../release/${exe_name}/include
cp -r template ../release/${exe_name}/template
cp -r ../config ../release/${exe_name}/config
# create compiled folder and copy hard coded queries
mkdir -p ../release/${exe_name}/compiled/cpu/hardcode
rm -rf ../release/${exe_name}/compiled/cpu/hardcode/*
cp ../tests/integration/hardcoded_query/*.cpp ../release/${exe_name}/compiled/cpu/hardcode
cp ../tests/integration/hardcoded_query/*.hpp ../release/${exe_name}/compiled/cpu/hardcode

View File

@ -1,128 +1,48 @@
cmake_minimum_required(VERSION 3.1)
project(memgraph_tests)
project(${project_name}_tests)
set(src_dir ${CMAKE_SOURCE_DIR}/src)
enable_testing()
include_directories(${catch_source_dir}/include)
# TODO: modular approach (REFACTOR)
## UNIT TESTS
# find unit tests
file(GLOB_RECURSE unit_test_files ${CMAKE_HOME_DIRECTORY}/tests/unit/*.cpp)
get_file_names("${unit_test_files}" file_names)
set(unit_test_names "${file_names}")
message(STATUS "Available unit tests are: ${unit_test_names}")
# copy unit test data
# copy test data
file(COPY ${CMAKE_SOURCE_DIR}/tests/data
DESTINATION ${CMAKE_BINARY_DIR}/tests)
# build unit tests
foreach(test ${unit_test_names})
set(test_name unit_${test})
add_executable(${test_name} unit/${test}.cpp ${src_dir}/template_engine/engine.cpp)
target_link_libraries(${test_name} memgraph)
# TODO: separate dependencies
target_link_libraries(${test_name} stdc++fs)
target_link_libraries(${test_name} cypher_lib)
target_link_libraries(${test_name} Threads::Threads)
target_link_libraries(${test_name} ${fmt_static_lib})
target_link_libraries(${test_name} ${yaml_static_lib})
add_test(NAME ${test_name} COMMAND ${test_name})
set_property(TARGET ${test_name} PROPERTY CXX_STANDARD 14)
endforeach()
# move test data data to the build directory
if (UNIX)
set(test_data "tests/data")
set(test_data_src "${CMAKE_SOURCE_DIR}/${test_data}")
set(test_data_dst "${CMAKE_BINARY_DIR}/${test_data}")
add_custom_target (test_data
COMMAND rm -rf ${test_data_dst}
COMMAND cp -r ${test_data_src} ${test_data_dst}
)
endif (UNIX)
# -----------------------------------------------------------------------------
## CONCURRENCY TESTS
# benchmark test binaries
if (ALL_TESTS OR BENCHMARK_TESTS)
add_subdirectory(${PROJECT_SOURCE_DIR}/benchmark)
endif()
# find concurrency tests
file(GLOB_RECURSE concurrency_test_files
${CMAKE_HOME_DIRECTORY}/tests/concurrent/*.cpp)
get_file_names("${concurrency_test_files}" file_names)
set(concurrency_test_names "${file_names}")
message(STATUS "Available concurrency tests are: ${concurrency_test_names}")
# concurrent test binaries
if (ALL_TESTS OR CONCURRENT_TESTS)
add_subdirectory(${PROJECT_SOURCE_DIR}/concurrent)
endif()
# build concurrency tests
foreach(test ${concurrency_test_names})
set(test_name concurrent_${test})
add_executable(${test_name} concurrent/${test}.cpp)
target_link_libraries(${test_name} memgraph)
target_link_libraries(${test_name} Threads::Threads)
target_link_libraries(${test_name} ${fmt_static_lib})
target_link_libraries(${test_name} ${yaml_static_lib})
add_test(NAME ${test_name} COMMAND ${test_name})
set_property(TARGET ${test_name} PROPERTY CXX_STANDARD 14)
endforeach()
# integration test binaries
if (ALL_TESTS OR INTEGRATION_TESTS)
add_subdirectory(${PROJECT_SOURCE_DIR}/integration)
endif()
## INTEGRATION TESTS
# manual test binaries
if (ALL_TESTS OR MANUAL_TESTS)
add_subdirectory(${PROJECT_SOURCE_DIR}/manual)
endif()
# test hard coded queries
add_executable(integration_queries integration/queries.cpp)
target_link_libraries(integration_queries stdc++fs)
target_link_libraries(integration_queries memgraph)
target_link_libraries(integration_queries Threads::Threads)
target_link_libraries(integration_queries ${fmt_static_lib})
target_link_libraries(integration_queries ${yaml_static_lib})
add_test(NAME integration_queries COMMAND integration_queries)
set_property(TARGET integration_queries PROPERTY CXX_STANDARD 14)
# test cleaning methods
add_executable(cleaning integration/cleaning.cpp)
target_link_libraries(cleaning memgraph)
target_link_libraries(cleaning Threads::Threads)
target_link_libraries(cleaning ${fmt_static_lib})
target_link_libraries(cleaning ${yaml_static_lib})
add_test(NAME cleaning COMMAND cleaning)
set_property(TARGET cleaning PROPERTY CXX_STANDARD 14)
# test snapshot validity
add_executable(snapshot integration/snapshot.cpp)
target_link_libraries(snapshot memgraph)
target_link_libraries(snapshot Threads::Threads)
target_link_libraries(snapshot ${fmt_static_lib})
target_link_libraries(snapshot ${yaml_static_lib})
add_test(NAME snapshot COMMAND snapshot)
set_property(TARGET snapshot PROPERTY CXX_STANDARD 14)
# test index validity
add_executable(index integration/index.cpp)
target_link_libraries(index memgraph)
target_link_libraries(index Threads::Threads)
target_link_libraries(index ${fmt_static_lib})
target_link_libraries(index ${yaml_static_lib})
add_test(NAME index COMMAND index)
set_property(TARGET index PROPERTY CXX_STANDARD 14)
## MANUAL TESTS
# cypher_ast
add_executable(manual_cypher_ast manual/cypher_ast.cpp)
target_link_libraries(manual_cypher_ast stdc++fs)
target_link_libraries(manual_cypher_ast memgraph)
target_link_libraries(manual_cypher_ast Threads::Threads)
target_link_libraries(manual_cypher_ast ${fmt_static_lib})
target_link_libraries(manual_cypher_ast ${yaml_static_lib})
target_link_libraries(manual_cypher_ast cypher_lib)
set_property(TARGET manual_cypher_ast PROPERTY CXX_STANDARD 14)
# query_engine
add_executable(manual_query_engine manual/query_engine.cpp)
target_link_libraries(manual_query_engine stdc++fs)
target_link_libraries(manual_query_engine memgraph)
target_link_libraries(manual_query_engine ${fmt_static_lib})
target_link_libraries(manual_query_engine ${yaml_static_lib})
target_link_libraries(manual_query_engine dl)
target_link_libraries(manual_query_engine cypher_lib)
target_link_libraries(manual_query_engine Threads::Threads)
set_property(TARGET manual_query_engine PROPERTY CXX_STANDARD 14)
# query_hasher
add_executable(manual_query_hasher manual/query_hasher.cpp)
target_link_libraries(manual_query_hasher stdc++fs)
target_link_libraries(manual_query_hasher memgraph)
target_link_libraries(manual_query_hasher ${fmt_static_lib})
target_link_libraries(manual_query_hasher ${yaml_static_lib})
target_link_libraries(manual_query_hasher Threads::Threads)
set_property(TARGET manual_query_hasher PROPERTY CXX_STANDARD 14)
# unit test binaries
if (ALL_TESTS OR UNIT_TESTS)
add_subdirectory(${PROJECT_SOURCE_DIR}/unit)
endif()

View File

@ -1,21 +1,43 @@
find_package(Threads REQUIRED)
file(GLOB_RECURSE ALL_BENCH_CPP *.cpp)
# set current directory name as a test type
get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME)
foreach(ONE_BENCH_CPP ${ALL_BENCH_CPP})
# get all cpp abs file names recursively starting from current directory
file(GLOB_RECURSE test_type_cpps *.cpp)
message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}")
get_filename_component(ONE_BENCH_EXEC ${ONE_BENCH_CPP} NAME_WE)
# for each cpp file build binary and register test
foreach(test_cpp ${test_type_cpps})
# Avoid name collision
set(TARGET_NAME Bench_${ONE_BENCH_EXEC})
# get exec name (remove extension from the abs path)
get_filename_component(exec_name ${test_cpp} NAME_WE)
add_executable(${TARGET_NAME} ${ONE_BENCH_CPP})
set_target_properties(${TARGET_NAME} PROPERTIES OUTPUT_NAME ${ONE_BENCH_EXEC})
target_link_libraries(${TARGET_NAME} benchmark ${CMAKE_THREAD_LIBS_INIT})
target_link_libraries(${TARGET_NAME} memgraph)
target_link_libraries(${TARGET_NAME} ${fmt_static_lib})
target_link_libraries(${TARGET_NAME} Threads::Threads)
target_link_libraries(${TARGET_NAME} ${yaml_static_lib})
add_test(${TARGET_NAME} ${ONE_BENCH_EXEC})
# set target name in format {project_name}_{test_type}_{exec_name}
set(target_name ${project_name}_${test_type}_${exec_name})
# build exec file
add_executable(${target_name} ${test_cpp})
set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})
# OUTPUT_NAME sets the real name of a target when it is built and can be
# used to help create two targets of the same name even though CMake
# requires unique logical target names
set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name})
# link libraries
# threads (cross-platform)
target_link_libraries(${target_name} Threads::Threads)
# google-benchmark
target_link_libraries(${target_name} benchmark ${CMAKE_THREAD_LIBS_INIT})
# memgraph lib
target_link_libraries(${target_name} memgraph_lib)
# fmt format lib
target_link_libraries(${target_name} ${fmt_static_lib})
# yaml parser lib
target_link_libraries(${target_name} ${yaml_static_lib})
# register test
add_test(${target_name} ${exec_name})
endforeach()

View File

@ -1,34 +0,0 @@
#include "benchmark/benchmark_api.h"

#include <set>
#include <vector>

// Measures the cost of building a std::vector<int> of state.range_x()
// elements via repeated push_back.
static void BM_VectorInsert(benchmark::State &state)
{
    while (state.KeepRunning()) {
        std::vector<int> values;
        const int limit = state.range_x();
        for (int value = 0; value < limit; ++value)
            values.push_back(value);
    }
}
// Register the function as a benchmark over input sizes 8 .. 8192.
BENCHMARK(BM_VectorInsert)->Range(8, 8 << 10);

// Measures the cost of building a std::set<int> of state.range_x()
// elements via repeated insert, for comparison with the vector above.
static void BM_SetInsert(benchmark::State &state)
{
    while (state.KeepRunning()) {
        std::set<int> values;
        const int limit = state.range_x();
        for (int value = 0; value < limit; ++value)
            values.insert(value);
    }
}
BENCHMARK(BM_SetInsert)->Range(8, 8 << 10);

BENCHMARK_MAIN();

View File

@ -1,34 +0,0 @@
#include "benchmark/benchmark_api.h"

#include <set>
#include <vector>

// Benchmarks sequential insertion of state.range_x() integers into a
// std::vector via push_back.
static void BM_VectorInsert(benchmark::State &state)
{
    while (state.KeepRunning()) {
        std::vector<int> container;
        const int n = state.range_x();
        for (int i = 0; i != n; ++i)
            container.push_back(i);
    }
}
// Register the function as a benchmark over input sizes 8 .. 8192.
BENCHMARK(BM_VectorInsert)->Range(8, 8 << 10);

// Benchmarks sequential insertion of state.range_x() integers into a
// std::set, as the ordered-container counterpart of the test above.
static void BM_SetInsert(benchmark::State &state)
{
    while (state.KeepRunning()) {
        std::set<int> container;
        const int n = state.range_x();
        for (int i = 0; i != n; ++i)
            container.insert(i);
    }
}
BENCHMARK(BM_SetInsert)->Range(8, 8 << 10);

BENCHMARK_MAIN();

View File

@ -0,0 +1,41 @@
find_package(Threads REQUIRED)

# set current directory name as a test type
get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME)

# get all cpp abs file names recursively starting from current directory
file(GLOB_RECURSE test_type_cpps *.cpp)
message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}")

# for each cpp file build binary and register test
foreach(test_cpp ${test_type_cpps})
    # get exec name (remove extension from the abs path)
    get_filename_component(exec_name ${test_cpp} NAME_WE)
    # set target name in format {project_name}_{test_type}_{exec_name}
    set(target_name ${project_name}_${test_type}_${exec_name})
    # build exec file
    add_executable(${target_name} ${test_cpp})
    set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})
    # OUTPUT_NAME sets the real name of a target when it is built and can be
    # used to help create two targets of the same name even though CMake
    # requires unique logical target names
    set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name})
    # Link everything with an explicit PRIVATE scope: nothing consumes a
    # benchmark binary, so usage requirements must not propagate.
    # Threads::Threads already carries the thread library/flags, so the
    # legacy ${CMAKE_THREAD_LIBS_INIT} variable previously passed next to
    # google-benchmark was redundant and has been dropped.
    target_link_libraries(${target_name}
        PRIVATE
            Threads::Threads
            benchmark
            memgraph_lib
            ${fmt_static_lib}
            ${yaml_static_lib})
    # Register the test with the NAME/COMMAND signature, passing the
    # *target* name so CMake substitutes the real executable location;
    # the old add_test(${target_name} ${exec_name}) form ran the bare
    # OUTPUT_NAME, which is not a target and not on ctest's PATH.
    add_test(NAME ${target_name} COMMAND ${target_name})
endforeach()

View File

@ -0,0 +1,70 @@
CREATE (g:garment {garment_id: 1234, garment_category_id: 1, conceals: 30}) RETURN g
MATCH(g:garment {garment_id: 1234}) SET g:AA RETURN g
MATCH(g:garment {garment_id: 1234}) SET g:BB RETURN g
MATCH(g:garment {garment_id: 1234}) SET g:EE RETURN g
CREATE (g:garment {garment_id: 2345, garment_category_id: 6, reveals: 10}) RETURN g
MATCH(g:garment {garment_id: 2345}) SET g:CC RETURN g
MATCH(g:garment {garment_id: 2345}) SET g:DD RETURN g
CREATE (g:garment {garment_id: 3456, garment_category_id: 8}) RETURN g
MATCH(g:garment {garment_id: 3456}) SET g:CC RETURN g
MATCH(g:garment {garment_id: 3456}) SET g:DD RETURN g
CREATE (g:garment {garment_id: 4567, garment_category_id: 15}) RETURN g
MATCH(g:garment {garment_id: 4567}) SET g:AA RETURN g
MATCH(g:garment {garment_id: 4567}) SET g:BB RETURN g
MATCH(g:garment {garment_id: 4567}) SET g:DD RETURN g
CREATE (g:garment {garment_id: 5678, garment_category_id: 19}) RETURN g
MATCH(g:garment {garment_id: 5678}) SET g:BB RETURN g
MATCH(g:garment {garment_id: 5678}) SET g:CC RETURN g
MATCH(g:garment {garment_id: 5678}) SET g:EE RETURN g
CREATE (g:garment {garment_id: 6789, garment_category_id: 3}) RETURN g
MATCH(g:garment {garment_id: 6789}) SET g:AA RETURN g
MATCH(g:garment {garment_id: 6789}) SET g:DD RETURN g
MATCH(g:garment {garment_id: 6789}) SET g:EE RETURN g
CREATE (g:garment {garment_id: 7890, garment_category_id: 25}) RETURN g
MATCH(g:garment {garment_id: 7890}) SET g:AA RETURN g
MATCH(g:garment {garment_id: 7890}) SET g:BB RETURN g
MATCH(g:garment {garment_id: 7890}) SET g:CC RETURN g
MATCH(g:garment {garment_id: 7890}) SET g:EE RETURN g
MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 6789}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 5678}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 3456}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
CREATE (p:profile {profile_id: 111, partner_id: 55, reveals: 30}) RETURN p
CREATE (p:profile {profile_id: 112, partner_id: 55}) RETURN p
CREATE (p:profile {profile_id: 112, partner_id: 77, conceals: 10}) RETURN p
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 2345}) CREATE (p)-[s:score]->(g) SET s.score=1200 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 3456}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 4567}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1800 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=2000 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 4567}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 5678}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1600 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1900 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 2345}) CREATE (p)-[s:score]->(g) SET s.score=1300 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 3456}) CREATE (p)-[s:score]->(g) SET s.score=1300 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 5678}) CREATE (p)-[s:score]->(g) SET s.score=1200 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1700 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1900 RETURN s
MATCH (a:garment)-[:default_outfit]-(b:garment)-[:default_outfit]-(c:garment)-[:default_outfit]-(d:garment)-[:default_outfit]-(a:garment)-[:default_outfit]-(c:garment), (b:garment)-[:default_outfit]-(d:garment) WHERE a.garment_id = 1234 RETURN a.garment_id, b.garment_id, c.garment_id, d.garment_id ORDER BY (a.score + b.score + c.score + d.score) DESC LIMIT 10

View File

@ -2,26 +2,29 @@ CREATE (n:LABEL {name: "TEST01"}) RETURN n
CREATE (n:LABEL {name: "TEST02"}) RETURN n
CREATE (n:LABEL {name: "TEST2"}) RETURN n
CREATE (n:LABEL {name: "TEST3"}) RETURN n
CREATE (n:OTHER {name: "TEST4"}) RETURN n
CREATE (n:ACCOUNT {id: 2322, name: "TEST", country: "Croatia", "created_at": 2352352}) RETURN n
MATCH (n {id: 0}) RETURN n", "MATCH (n {id: 1}) RETURN n
MATCH (n {id: 2}) RETURN n", "MATCH (n {id: 3}) RETURN n
MATCH (n {id: 0}) RETURN n
MATCH (n {id: 1}) RETURN n
MATCH (n {id: 2}) RETURN n
MATCH (n {id: 3}) RETURN n
MATCH (a {id:0}), (p {id: 1}) CREATE (a)-[r:IS]->(p) RETURN r
MATCH (a {id:1}), (p {id: 2}) CREATE (a)-[r:IS]->(p) RETURN r
MATCH ()-[r]-() WHERE ID(r)=0 RETURN r
MATCH ()-[r]-() WHERE ID(r)=1 RETURN r
MATCH (n: {id: 0}) SET n.name = "TEST100" RETURN n
MATCH (n: {id: 1}) SET n.name = "TEST101" RETURN n
MATCH (n: {id: 0}) SET n.name = "TEST102" RETURN n
MATCH (n:LABEL) RETURN n"
MATCH (n {id: 0}) SET n.name = "TEST100" RETURN n
MATCH (n {id: 1}) SET n.name = "TEST101" RETURN n
MATCH (n {id: 0}) SET n.name = "TEST102" RETURN n
MATCH (n:LABEL) RETURN n
MATCH (n1), (n2) WHERE ID(n1)=0 AND ID(n2)=1 CREATE (n1)<-[r:IS {age: 25,weight: 70}]-(n2) RETURN r
MATCH (n) RETURN n", "MATCH (n:LABEL) RETURN n", "MATCH (n) DELETE n
MATCH (n:LABEL) DELETE n", "MATCH (n) WHERE ID(n) = 0 DELETE n
MATCH ()-[r]-() WHERE ID(r) = 0 DELETE r", "MATCH ()-[r]-() DELETE r
MATCH (n) RETURN n
MATCH (n:LABEL) RETURN n
MATCH (n) DELETE n
MATCH (n:LABEL) DELETE n
MATCH (n) WHERE ID(n) = 0 DELETE n
MATCH ()-[r]-() WHERE ID(r) = 0 DELETE r
MATCH ()-[r]-() DELETE r
MATCH ()-[r:TYPE]-() DELETE r
MATCH (n)-[:TYPE]->(m) WHERE ID(n) = 0 RETURN m
MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN m
MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN n,m
MATCH (n:LABEL)-[:TYPE]->(m) RETURN n"
CREATE (n:LABEL1:LABEL2 {name: "TEST01", age: 20}) RETURN n
MATCH (n:LABEL1:LABEL2 {name: "TEST01", age: 20}) RETURN n
MATCH (n:LABEL)-[:TYPE]->(m) RETURN n

View File

@ -1,2 +0,0 @@
MERGE (g1:garment {garment_id: 1234})-[r:default_outfit]-(g2:garment {garment_id: 2345}) RETURN r
MATCH (p:profile {profile_id: 111, partner_id: 55})-[s:score]-(g:garment {garment_id: 1234}) DELETE s

View File

@ -0,0 +1,43 @@
find_package(Threads REQUIRED)

# set current directory name as a test type
get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME)

# get all cpp abs file names starting from current directory
file(GLOB test_type_cpps *.cpp)
message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}")

# for each cpp file build binary and register test
foreach(test_cpp ${test_type_cpps})
    # get exec name (remove extension from the abs path)
    get_filename_component(exec_name ${test_cpp} NAME_WE)
    # set target name in format {project_name}_{test_type}_{exec_name}
    set(target_name ${project_name}_${test_type}_${exec_name})
    # build exec file
    add_executable(${target_name} ${test_cpp})
    set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})
    # OUTPUT_NAME sets the real name of a target when it is built and can be
    # used to help create two targets of the same name even though CMake
    # requires unique logical target names
    set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name})
    # Link everything in one command with an explicit PRIVATE scope:
    # nothing consumes a test binary, so usage requirements must not
    # propagate. stdc++fs provides std::experimental::filesystem.
    target_link_libraries(${target_name}
        PRIVATE
            stdc++fs
            Threads::Threads
            memgraph_lib
            ${fmt_static_lib}
            ${yaml_static_lib})
    # Register the test with the NAME/COMMAND signature, passing the
    # *target* name so CMake substitutes the real executable location;
    # the old add_test(${target_name} ${exec_name}) form ran the bare
    # OUTPUT_NAME, which is not a target and not on ctest's PATH.
    add_test(NAME ${target_name} COMMAND ${target_name})
endforeach()

View File

@ -17,7 +17,7 @@ auto load_basic_functions(Db &db)
vertex_accessor.set(property_key, std::move(args[0]));
return t.commit();
};
functions[11597417457737499503u] = create_node;
functions[3191791685918807343u] = create_node;
// CREATE (n:LABEL {name: "TEST"}) RETURN n;
auto create_labeled_and_named_node = [&db](properties_t &&args) {
@ -29,6 +29,19 @@ auto load_basic_functions(Db &db)
vertex_accessor.add_label(label);
return t.commit();
};
functions[8273374963505210457u] = create_labeled_and_named_node;
// CREATE (n:OTHER {name: "cleaner_test"}) RETURN n
auto create_node_with_other_label = [&db](properties_t &&args) {
DbAccessor t(db);
auto property_key = t.vertex_property_key("name", args[0].key.flags());
auto &label = t.label_find_or_create("OTHER");
auto vertex_accessor = t.vertex_insert();
vertex_accessor.set(property_key, std::move(args[0]));
vertex_accessor.add_label(label);
return t.commit();
};
functions[6237439055665132277u] = create_node_with_other_label;
// CREATE (n:OTHER {name: "TEST"}) RETURN n;
auto create_labeled_and_named_node_v2 = [&db](properties_t &&args) {
@ -40,7 +53,9 @@ auto load_basic_functions(Db &db)
vertex_accessor.add_label(label);
return t.commit();
};
functions[832997784138269151u] = create_labeled_and_named_node_v2;
// CREATE (n:ACCOUNT {id: 2322, name: "TEST", country: "Croatia", "created_at": 2352352}) RETURN n
auto create_account = [&db](properties_t &&args) {
DbAccessor t(db);
auto prop_id = t.vertex_property_key("id", args[0].key.flags());
@ -58,7 +73,12 @@ auto load_basic_functions(Db &db)
vertex_accessor.add_label(label);
return t.commit();
};
functions[16701745788564313211u] = create_account;
// TODO: inconsistency but it doesn't affect the integration tests
// this is not a unique case
// MATCH (n) WHERE ID(n) = 1 RETURN n
// MATCH (n {id: 0}) RETURN n
auto find_node_by_internal_id = [&db](properties_t &&args) {
DbAccessor t(db);
auto maybe_va = t.vertex_find(Id(args[0].as<Int64>().value()));
@ -75,7 +95,10 @@ auto load_basic_functions(Db &db)
}
return t.commit();
};
functions[1444315501940151196u] = find_node_by_internal_id;
functions[11624983287202420303u] = find_node_by_internal_id;
// MATCH (a {id:0}), (p {id: 1}) CREATE (a)-[r:IS]->(p) RETURN r
auto create_edge = [&db](properties_t &&args) {
DbAccessor t(db);
auto &edge_type = t.type_find_or_create("IS");
@ -98,7 +121,9 @@ auto load_basic_functions(Db &db)
return ret;
};
functions[6972641167053231355u] = create_edge;
// MATCH ()-[r]-() WHERE ID(r)=0 RETURN r
auto find_edge_by_internal_id = [&db](properties_t &&args) {
DbAccessor t(db);
auto maybe_ea = t.edge_find(args[0].as<Int64>().value());
@ -122,7 +147,9 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[15080095524051312786u] = find_edge_by_internal_id;
// MATCH (n {id: 0}) SET n.name = "TEST102" RETURN n
auto update_node = [&db](properties_t &&args) {
DbAccessor t(db);
auto prop_name = t.vertex_property_key("name", args[1].key.flags());
@ -136,6 +163,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[2835161674800069655u] = update_node;
// MATCH (n1), (n2) WHERE ID(n1)=0 AND ID(n2)=1 CREATE (n1)<-[r:IS {age: 25,
// weight: 70}]-(n2) RETURN r
@ -157,6 +185,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[10360716473890539004u] = create_edge_v2;
// MATCH (n) RETURN n
auto match_all_nodes = [&db](properties_t &&args) {
@ -167,6 +196,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[5949923385370229113u] = match_all_nodes;
// MATCH (n:LABEL) RETURN n
auto match_by_label = [&db](properties_t &&args) {
@ -181,6 +211,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[16533049303627288013u] = match_by_label;
// MATCH (n) DELETE n
auto match_all_delete = [&db](properties_t &&args) {
@ -196,6 +227,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[16628411757092333638u] = match_all_delete;
// MATCH (n:LABEL) DELETE n
auto match_label_delete = [&db](properties_t &&args) {
@ -208,6 +240,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[10022871879682099034u] = match_label_delete;
// MATCH (n) WHERE ID(n) = id DELETE n
auto match_id_delete = [&db](properties_t &&args) {
@ -221,6 +254,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[5375628876334795080u] = match_id_delete;
// MATCH ()-[r]-() WHERE ID(r) = id DELETE r
auto match_edge_id_delete = [&db](properties_t &&args) {
@ -234,15 +268,17 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[11747491556476630933u] = match_edge_id_delete;
// MATCH ()-[r]-() DELETE r
auto match_edge_all_delete = [&db](properties_t &&args) {
auto match_edge_all_delete = [&db](properties_t &&) {
DbAccessor t(db);
t.edge_access().fill().for_all([&](auto a) { a.remove(); });
return t.commit();
};
functions[10064744449500095415u] = match_edge_all_delete;
// MATCH ()-[r:TYPE]-() DELETE r
auto match_edge_type_delete = [&db](properties_t &&args) {
@ -254,6 +290,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[6084209470626828855u] = match_edge_type_delete;
// MATCH (n)-[:TYPE]->(m) WHERE ID(n) = id RETURN m
auto match_id_type_return = [&db](properties_t &&args) {
@ -275,6 +312,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[2605621337795673948u] = match_id_type_return;
// MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN m
auto match_name_type_return = [&db](properties_t &&args) {
@ -313,6 +351,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[17303982256920342123u] = match_name_type_return;
// MATCH (n)-[:TYPE]->(m) WHERE n.name = "kruno" RETURN n,m
auto match_name_type_return_cross = [&db](properties_t &&args) {
@ -393,6 +432,7 @@ auto load_basic_functions(Db &db)
return t.commit();
};
functions[17456874322957005665u] = match_name_type_return_cross;
// MATCH (n:LABEL)-[:TYPE]->(m) RETURN n
auto match_label_type_return = [&db](properties_t &&args) {
@ -433,8 +473,8 @@ auto load_basic_functions(Db &db)
t.abort();
return false;
}
};
functions[4866842751631597263u] = match_label_type_return;
// MATCH (n:LABEL {name: "TEST01"}) RETURN n;
auto match_label_property = [&db](properties_t &&args) {
@ -454,33 +494,7 @@ auto load_basic_functions(Db &db)
return false;
}
};
functions[17721584194272598838u] = match_label_property;
functions[15284086425088081497u] = match_all_nodes;
functions[4857652843629217005u] = match_by_label;
functions[15648836733456301916u] = create_edge_v2;
functions[10597108978382323595u] = create_account;
functions[5397556489557792025u] = create_labeled_and_named_node;
// TODO: query hasher reports two hash values
functions[998725786176032607u] = create_labeled_and_named_node_v2;
functions[16090682663946456821u] = create_labeled_and_named_node_v2;
functions[7939106225150551899u] = create_edge;
functions[6579425155585886196u] = create_edge;
functions[11198568396549106428u] = find_node_by_internal_id;
functions[8320600413058284114u] = find_edge_by_internal_id;
functions[6813335159006269041u] = update_node;
functions[10506105811763742758u] = match_all_delete;
functions[13742779491897528506u] = match_label_delete;
functions[11349462498691305864u] = match_id_delete;
functions[6963549500479100885u] = match_edge_id_delete;
functions[14897166600223619735u] = match_edge_all_delete;
functions[16888549834923624215u] = match_edge_type_delete;
functions[11675960684124428508u] = match_id_type_return;
functions[15698881472054193835u] = match_name_type_return;
functions[12595102442911913761u] = match_name_type_return_cross;
functions[8918221081398321263u] = match_label_type_return;
functions[7710665404758409302u] = match_label_property;
return functions;
}

View File

@ -633,6 +633,10 @@ auto load_dressipi_functions(Db &db)
return t.commit();
};
// Query: MATCH (a:garment)-[:default_outfit]-(b:garment)-[:default_outfit]-(c:garment)-[:default_outfit]-(d:garment)-[:default_outfit]-(a:garment)-[:default_outfit]-(c:garment), (b:garment)-[:default_outfit]-(d:garment) WHERE a.garment_id = 1234 RETURN a.garment_id, b.garment_id, c.garment_id, d.garment_id ORDER BY (a.score + b.score + c.score + d.score) DESC LIMIT 10
// Hash: 11856262817829095719
// TODO: automate
return functions;
}
}

View File

@ -0,0 +1,185 @@
#include <iostream>
#include <queue>
#include <string>
#include <vector>
#include "query/i_plan_cpu.hpp"
#include "query/util.hpp"
#include "storage/edge_x_vertex.hpp"
#include "storage/model/properties/all.hpp"
#include "storage/vertex_accessor.hpp"
#include "using.hpp"
#include "utils/memory/stack_allocator.hpp"
using std::cout;
using std::endl;
// Dressipi astar query of 4 clicks.
// TODO: figure out from the pattern in a query
constexpr size_t max_depth = 3;
// TODO: from query LIMIT 10
constexpr size_t limit = 10;
// Search-tree node for the A* traversal: wraps a vertex together with the
// accumulated path cost, the node it was expanded from, and the search
// depth. Instances are placement-new'ed out of a StackAllocator by astar().
class Node
{
public:
    Node *parent = {nullptr};       // nullptr marks the start node
    VertexPropertyType<Float> tkey; // property key of the vertex "score"
    double cost;                    // accumulated path cost (queue priority)
    int depth = {0};                // number of edges from the start node
    double sum = {0.0};             // cached result of sum_vertex_score()
    VertexAccessor vacc;

    // Start-node constructor (no parent, depth 0).
    // NOTE: initializers are listed in member-declaration order
    // (tkey, cost, vacc) -- members are always initialized in that order
    // anyway, and matching it avoids -Wreorder warnings.
    Node(VertexAccessor vacc, double cost,
         VertexPropertyType<Float> const &tkey)
        : tkey(tkey), cost(cost), vacc(vacc)
    {
    }
    // Child-node constructor; depth is derived from the parent's depth.
    Node(VertexAccessor vacc, double cost, Node *parent,
         VertexPropertyType<Float> const &tkey)
        : parent(parent), tkey(tkey), cost(cost), depth(parent->depth + 1),
          vacc(vacc)
    {
    }

    // Walks the parent chain up to the root, summing each vertex's score
    // property; caches the total in `sum` and returns it.
    double sum_vertex_score()
    {
        auto now = this;
        double total = 0;
        do
        {
            total += (now->vacc.at(tkey).get())->value();
            now = now->parent;
        } while (now != nullptr);
        this->sum = total;
        return total;
    }
};
// Cycle filter for the A* expansion: returns true when vertex `v` is
// valid (fill() succeeds) AND none of the ancestors of `before` already
// contain `v` -- i.e. expanding the path to `v` would not revisit a
// vertex. Returns false for an invalid vertex or when a match is found.
// NOTE(review): parameter `t` is unused in this body.
// NOTE(review): the loop rebinds `before` to successive ancestors and
// checks v.in_contains(before->vacc); presumably in_contains tests
// whether `v` has an in-edge from that vertex -- confirm against
// VertexAccessor's contract. `found` is assigned but never read.
bool vertex_filter_contained(DbAccessor &t, VertexAccessor &v, Node *before)
{
    if (v.fill())
    {
        bool found;
        do
        {
            found = false;
            // move one step up the path; reaching the root with no match
            // means the vertex is safe to expand
            before = before->parent;
            if (before == nullptr)
            {
                return true;
            }
        } while (v.in_contains(before->vacc));
    }
    return false;
}
// Best-first (A*-style) expansion from start vertex `va`, following out
// edges up to max_depth hops. Nodes are ordered by accumulated cost
// (1 - score per hop); the first `limit` complete paths are returned.
// The plan_args_t and Stream parameters are intentionally unnamed/unused.
// Returns a vector of the leaf Node* of each collected path.
template <typename Stream>
auto astar(VertexAccessor &va, DbAccessor &t, plan_args_t &, Stream &)
{
StackAllocator stack;
std::vector<Node *> results;
// TODO: variable part (extract)
VertexPropertyType<Float> tkey = t.vertex_property_key<Float>("score");
// min-heap on accumulated cost (smallest cost expanded first)
auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; };
std::priority_queue<Node *, std::vector<Node *>, decltype(cmp)> queue(cmp);
// all Nodes are placement-new'ed from the local stack allocator
Node *start = new (stack.allocate<Node>()) Node(va, 0, tkey);
queue.push(start);
size_t count = 0;
do
{
auto now = queue.top();
queue.pop();
if (now->depth >= max_depth)
{
// complete path: cache its total score and collect it
now->sum_vertex_score();
results.emplace_back(now);
count++;
if (count >= limit)
{
// the limit was reached -> STOP the execution
break;
}
// if the limit wasn't reached -> POP the next vertex
continue;
}
// expand along out edges, skipping vertices already on this path
iter::for_all(now->vacc.out(), [&](auto edge) {
VertexAccessor va = edge.to();
if (vertex_filter_contained(t, va, now))
{
// edge cost: higher-scored vertices are cheaper to reach
auto cost = 1 - va.at(tkey).get()->value();
Node *n = new (stack.allocate<Node>())
Node(va, now->cost + cost, now, tkey);
queue.push(n);
}
});
} while (!queue.empty());
// NOTE(review): stack.free() releases the allocator that owns every Node
// in `results`, yet the caller (PlanCPU::run) sorts and dereferences
// those Node* afterwards. Unless StackAllocator::free only resets
// bookkeeping and leaves memory mapped, this is a use-after-free --
// confirm StackAllocator's semantics.
stack.free();
return results;
}
// Streams the id property (`key`) of every vertex on the path ending at
// `node`, in root-to-leaf order: recurse to the root first, then write
// the current vertex on the way back down.
void reverse_stream_ids(Node *node, Stream& stream, VertexPropertyKey key)
{
    if (node != nullptr)
    {
        // ancestors are emitted before the current node
        reverse_stream_ids(node->parent, stream, key);
        stream.write(node->vacc.at(key).template as<Int64>());
    }
}
// Compiled query plan for the dressipi 4-click outfit query: for every
// :garment vertex matching the garment_id argument, runs the A* search
// and streams the garment-id chains of the best paths.
class PlanCPU : public IPlanCPU<Stream>
{
public:
// Executes the plan inside a fresh transaction; returns the result of
// t.commit(). The Bolt-like record protocol written to `stream` is
// order-sensitive: fields header, then records, then meta.
bool run(Db &db, plan_args_t &args, Stream &stream) override
{
DbAccessor t(db);
// args[0] carries the garment_id literal stripped from the query
indices_t indices = {{"garment_id", 0}};
auto properties = query_properties(indices, args);
auto &label = t.label_find_or_create("garment");
auto garment_id_prop_key =
t.vertex_property_key("garment_id", args[0].key.flags());
stream.write_fields(
{{"a.garment_id", "b.garment_id", "c.garment_id", "d.garment_id"}});
// scan the :garment label index, filtered on garment_id
label.index()
.for_range(t)
.properties_filter(t, properties)
.for_all([&](auto va) {
auto results = astar(va, t, args, stream);
// best (highest score sum) paths first -- the ORDER BY ... DESC
std::sort(results.begin(), results.end(),
[](Node *a, Node *b) { return a->sum > b->sum; });
for (auto node : results)
{
stream.write_record();
// one list of max_depth + 1 garment ids per path
stream.write_list_header(max_depth + 1);
reverse_stream_ids(node, stream, garment_id_prop_key);
}
});
stream.write_empty_fields();
stream.write_meta("r");
return t.commit();
}

~PlanCPU() {}
};
// Plugin entry points: the query engine dlopen()s this compiled plan and
// resolves these unmangled C symbols to create and destroy the instance.
extern "C" IPlanCPU<Stream> *produce() { return new PlanCPU(); }
extern "C" void destruct(IPlanCPU<Stream> *p) { delete p; }

View File

@ -8,6 +8,7 @@
#include "utils/string/file.hpp"
#include "utils/variadic/variadic.hpp"
#include "utils/command_line/arguments.hpp"
#include "stream/print_record_stream.hpp"
Logger logger;
@ -15,10 +16,14 @@ int main(int argc, char *argv[])
{
auto arguments = all_arguments(argc, argv);
PrintRecordStream stream(std::cout);
// POSSIBILITIES: basic, dressipi
auto suite_name = get_argument(arguments, "-s", "basic");
// POSSIBILITIES: query_execution, hash_generation
auto work_mode = get_argument(arguments, "-w", "query_execution");
// POSSIBILITIES: mg_basic.txt, dressipi_basic.txt, dressipi_graph.txt
auto query_set_filename = get_argument(arguments, "-q", "mg_basic.txt");
// init logging
logging::init_sync();
@ -39,7 +44,7 @@ int main(int argc, char *argv[])
auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL);
// load quries
std::string file_path = "data/queries/core/" + suite_name + ".txt";
std::string file_path = "data/queries/core/" + query_set_filename;
auto queries = utils::read_lines(file_path.c_str());
// execute all queries

View File

@ -0,0 +1,140 @@
#pragma once

// <ostream> and <cstddef> were previously pulled in only transitively;
// include them explicitly for std::ostream and size_t.
#include <cstddef>
#include <map>
#include <ostream>
#include <string>
#include <vector>

#include "utils/exceptions/not_yet_implemented.hpp"

// Debug record stream for manual tests: pretty-prints every protocol
// event as plain text to a std::ostream instead of encoding it for a
// client. Methods the manual tests do not exercise throw
// NotYetImplemented.
class PrintRecordStream
{
private:
    std::ostream& stream; // destination; not owned, must outlive this object

public:
    PrintRecordStream(std::ostream &stream) : stream(stream) {}

    void write_success() { stream << "SUCCESS\n"; }

    void write_success_empty() { stream << "SUCCESS EMPTY\n"; }

    void write_ignored() { stream << "IGNORED\n"; }

    void write_empty_fields() { stream << "EMPTY FIELDS\n"; }

    // Prints all field names on one "FIELDS:" line.
    void write_fields(const std::vector<std::string> &fields)
    {
        stream << "FIELDS:";
        for (auto &field : fields)
        {
            stream << " " << field;
        }
        stream << '\n';
    }

    void write_field(const std::string &field)
    {
        stream << "Field: " << field << '\n';
    }

    void write_list_header(size_t size)
    {
        stream << "List: " << size << '\n';
    }

    void write_record() { stream << "Record\n"; }

    // NOTE: unlike the other writers, this one emits no trailing newline.
    void write_meta(const std::string &type)
    {
        stream << "Meta: " << type;
    }

    // --- unimplemented protocol events ------------------------------------

    void write_failure(const std::map<std::string, std::string> &data)
    {
        throw NotYetImplemented();
    }

    void write_count(const size_t count) { throw NotYetImplemented(); }

    void write(const VertexAccessor &vertex) { throw NotYetImplemented(); }

    void write_vertex_record(const VertexAccessor& va)
    {
        throw NotYetImplemented();
    }

    void write(const EdgeAccessor &edge) { throw NotYetImplemented(); }

    void write_edge_record(const EdgeAccessor& ea)
    {
        throw NotYetImplemented();
    }

    void write(const StoredProperty<TypeGroupEdge> &prop)
    {
        // prop.accept(serializer);
        throw NotYetImplemented();
    }

    void write(const StoredProperty<TypeGroupVertex> &prop)
    {
        // prop.accept(serializer);
        throw NotYetImplemented();
    }

    void write(const Null &prop) { throw NotYetImplemented(); }
    void write(const Bool &prop) { throw NotYetImplemented(); }
    void write(const Float &prop) { throw NotYetImplemented(); }
    void write(const Int32 &prop) { throw NotYetImplemented(); }
    void write(const Int64 &prop) { throw NotYetImplemented(); }
    void write(const Double &prop) { throw NotYetImplemented(); }
    void write(const String &prop) { throw NotYetImplemented(); }
    void write(const ArrayBool &prop) { throw NotYetImplemented(); }
    void write(const ArrayInt32 &prop) { throw NotYetImplemented(); }
    void write(const ArrayInt64 &prop) { throw NotYetImplemented(); }
    void write(const ArrayFloat &prop) { throw NotYetImplemented(); }
    void write(const ArrayDouble &prop) { throw NotYetImplemented(); }
    void write(const ArrayString &prop) { throw NotYetImplemented(); }

    void send() { throw NotYetImplemented(); }

    void chunk() { throw NotYetImplemented(); }
};

View File

@ -0,0 +1,44 @@
# Builds one test executable per .cpp file found under this directory.
# Target names are {project_name}_{test_type}_{exec_name}; the on-disk binary
# keeps just {exec_name}.

find_package(Threads REQUIRED)

# set current directory name as a test type
get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME)

# get all cpp abs file names recursively starting from current directory
# NOTE: file(GLOB) only re-scans on configure — re-run cmake after adding
# a new test source, or it will not be built.
file(GLOB_RECURSE test_type_cpps *.cpp)
message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}")

# for each cpp file build a binary
foreach(test_cpp ${test_type_cpps})
    # get exec name (remove extension from the abs path)
    get_filename_component(exec_name ${test_cpp} NAME_WE)

    # set target name in format {project_name}_{test_type}_{exec_name}
    set(target_name ${project_name}_${test_type}_${exec_name})

    # build exec file
    add_executable(${target_name} ${test_cpp})
    set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})

    # OUTPUT_NAME sets the real name of a target when it is built and can be
    # used to help create two targets of the same name even though CMake
    # requires unique logical target names
    set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name})

    # link everything a test binary needs in one call, with explicit
    # visibility; ${CMAKE_DL_LIBS} is the portable spelling of the dynamic
    # loader library (-ldl on Linux) instead of the hardcoded "dl"
    target_link_libraries(${target_name} PRIVATE
        stdc++fs             # filesystem TS
        Threads::Threads     # threads (cross-platform)
        memgraph_lib         # memgraph lib
        ${fmt_static_lib}    # fmt format lib
        ${yaml_static_lib}   # yaml parser lib
        cypher_lib           # cypher lib
        ${CMAKE_DL_LIBS})    # dynamic loader
endforeach()

View File

@ -1,48 +0,0 @@
# Standalone build for the logging manual test: compiles the logging sources
# from the main tree into objects and links them with main.cpp.

# compiler
CXX=clang++

# compile flags (C++ sources, so CXXFLAGS by convention)
CXXFLAGS=-std=c++1y -pthread -g2 # -D_GLIBCXX_DEBUG

# includes and libraries
INCLUDE_PATHS=-I../../../include -I../../../libs/fmt -I../../../src
LIB_PATHS=-L../../../libs/fmt/fmt
LDFLAGS=-lfmt

# source and executable
LOG_SRC_PATH=../../..
OBJECTS=async_log.o sync_log.o stderr.o stdout.o default.o levels.o log.o
SOURCES=main.cpp $(OBJECTS)
EXECUTABLE=a.out

# release target
all: $(EXECUTABLE)

$(EXECUTABLE): $(SOURCES)
	$(CXX) $(CXXFLAGS) $(INCLUDE_PATHS) $(SOURCES) -o $(EXECUTABLE) $(LIB_PATHS) $(LDFLAGS)

# locate each object's source via vpath + a single pattern rule instead of
# one hand-written rule per file (resolves the old "TODO: auto")
vpath %.cpp $(LOG_SRC_PATH)/src/logging $(LOG_SRC_PATH)/src/logging/logs $(LOG_SRC_PATH)/src/logging/streams

%.o: %.cpp
	$(CXX) $(CXXFLAGS) $(INCLUDE_PATHS) -c $< -o $@

# phony targets produce no file of their own name
.PHONY: all clean
clean:
	rm -f a.out
	rm -f *.o

View File

@ -1,20 +0,0 @@
#include <iostream>
#include <memory>  // std::make_unique (previously pulled in transitively)

#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"

// Smoke test: initialize synchronous logging, pipe it to stdout, and emit
// one message from a named logger.
int main(void)
{
    // init logging
    logging::init_sync();
    logging::log->pipe(std::make_unique<Stdout>());

    // get Main logger (direct init instead of default-construct + assign)
    Logger logger = logging::log->logger("Main");
    logger.info("{}", logging::log->type());

    // removed: a leaked, unused `new std::string("test_value")` allocation
    return 0;
}

View File

@ -1,15 +0,0 @@
#include <iostream>
#include <vector>

#include "utils/iterator/map.hpp"

// Prints each element of a small test vector on its own line.
int main(void)
{
    std::vector<int> numbers{1, 2, 3};
    for (auto it = numbers.begin(); it != numbers.end(); ++it)
    {
        std::cout << *it << std::endl;
    }
    return 0;
}

47
tests/unit/CMakeLists.txt Normal file
View File

@ -0,0 +1,47 @@
# Builds one unit-test executable per .cpp file under this directory and
# registers each with CTest.

find_package(Threads REQUIRED)

# set current directory name as a test type
get_filename_component(test_type ${CMAKE_CURRENT_SOURCE_DIR} NAME)

# get all cpp abs file names recursively starting from current directory
# NOTE: file(GLOB) only re-scans on configure — re-run cmake after adding
# a new test source, or it will not be built.
file(GLOB_RECURSE test_type_cpps *.cpp)
message(STATUS "Available ${test_type} cpp files are: ${test_type_cpps}")

# for each cpp file build binary and register test
foreach(test_cpp ${test_type_cpps})
    # get exec name (remove extension from the abs path)
    get_filename_component(exec_name ${test_cpp} NAME_WE)

    # set target name in format {project_name}_{test_type}_{exec_name}
    set(target_name ${project_name}_${test_type}_${exec_name})

    # build exec file
    add_executable(${target_name} ${test_cpp})
    set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})

    # OUTPUT_NAME sets the real name of a target when it is built and can be
    # used to help create two targets of the same name even though CMake
    # requires unique logical target names
    set_target_properties(${target_name} PROPERTIES OUTPUT_NAME ${exec_name})

    # link everything a test binary needs in one call, with explicit
    # visibility; ${CMAKE_DL_LIBS} is the portable spelling of the dynamic
    # loader library (-ldl on Linux) instead of the hardcoded "dl"
    target_link_libraries(${target_name} PRIVATE
        stdc++fs             # filesystem TS
        Threads::Threads     # threads (cross-platform)
        memgraph_lib         # memgraph lib
        ${fmt_static_lib}    # fmt format lib
        ${yaml_static_lib}   # yaml parser lib
        cypher_lib           # cypher lib
        ${CMAKE_DL_LIBS})    # dynamic loader

    # register test — the COMMAND must be the TARGET name so CMake expands it
    # to the built binary's full path; the old form passed ${exec_name}
    # (the OUTPUT_NAME, not a target), which ctest could not resolve
    add_test(NAME ${target_name} COMMAND ${target_name})
endforeach()

View File

@ -19,7 +19,7 @@ auto load_queries()
{
std::vector<std::string> queries;
fs::path queries_path = "data/cypher_queries";
fs::path queries_path = "../data/queries/cypher";
std::string query_file_extension = "cypher";
for (auto& directory_entry :