diff --git a/CMakeLists.txt b/CMakeLists.txt
index be0cadcb9..5908abd4d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -386,7 +386,8 @@ set(MEMGRAPH_BUILD_NAME
 # memgraph main executable
 if (MEMGRAPH)
   add_executable(${MEMGRAPH_BUILD_NAME} ${src_dir}/memgraph_bolt.cpp)
-  set_property(TARGET ${MEMGRAPH_BUILD_NAME} PROPERTY CXX_STANDARD ${cxx_standard})
+  set_property(TARGET ${MEMGRAPH_BUILD_NAME}
+               PROPERTY CXX_STANDARD ${cxx_standard})
   target_link_libraries(${MEMGRAPH_BUILD_NAME} memgraph_lib)
   target_link_libraries(${MEMGRAPH_BUILD_NAME} stdc++fs)
   target_link_libraries(${MEMGRAPH_BUILD_NAME} Threads::Threads)
@@ -417,3 +418,26 @@ file(GLOB_RECURSE __SOURCES ${CMAKE_SOURCE_DIR}/src/*.hpp
                             ${CMAKE_SOURCE_DIR}/src/*.cpp)
 add_executable(__refactor_target ${__SOURCES})
 set_target_properties(__refactor_target PROPERTIES EXCLUDE_FROM_ALL 1)
+
+# targets to check compilability of all hardcoded query plans
+# that is a first step in integration testing
+# integration testing phases should be
+#   1. compilation of all hardcoded query plans
+#   2. query plan execution against an empty database and injected OutputStream
+#   3. integration tests for all pilot/clients written in cucumber
+# the following targets address only the first phase
+file(GLOB __HARDCODED_SOURCES
+     ${CMAKE_SOURCE_DIR}/tests/integration/hardcoded_query/*.cpp)
+foreach(file_path ${__HARDCODED_SOURCES})
+  get_filename_component(file_name ${file_path} NAME_WE)
+  set(target_name __${file_name}_hardcoded_target)
+  add_executable(${target_name} ${CMAKE_SOURCE_DIR}/libs/__main.cpp
+                 ${file_path})
+  target_link_libraries(${target_name} memgraph_lib)
+  target_link_libraries(${target_name} fmt)
+  target_link_libraries(${target_name} Threads::Threads)
+  set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})
+  set_target_properties(${target_name}
+                        PROPERTIES RUNTIME_OUTPUT_DIRECTORY
+                        "${CMAKE_BINARY_DIR}/__hardcoded_targets")
+endforeach()
diff --git a/release/alpha.dockerfile b/release/alpha.dockerfile
index 3654a0c4c..da50b48f4 100644
--- a/release/alpha.dockerfile
+++ b/release/alpha.dockerfile
@@ -1,14 +1,13 @@
 FROM ubuntu:16.04
 
 RUN apt-get update \
-    && apt-get install -y clang libssl-dev \
+    && apt-get install -y clang uuid-dev \
     && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
 
-ENV BINARY_NAME memgraph_414_dba2610_dev_debug
+ENV BINARY_NAME memgraph_552_545344b_mg_release_debug
 ENV MEMGRAPH_CONFIG /memgraph/config/memgraph.yaml
 
 COPY $BINARY_NAME /memgraph
-COPY libs/fmt /libs/fmt
 
 WORKDIR /memgraph
 
diff --git a/release/alpha.sh b/release/alpha.sh
index c9d99041d..2540a23f2 100755
--- a/release/alpha.sh
+++ b/release/alpha.sh
@@ -1,33 +1,20 @@
 #!/bin/bash
 
-# Initial version of script that is going to be used for release build.
+# Initial version of script that is going to be used for release builds.
 
 # NOTE: do not run this script as a super user
 
-# TODO: enable options related to lib
-
 echo "Memgraph Release Building..."
 
-cd ../build
-# get most recent version of memgraph exe
-exe_name=`ls -t memgraph_* | head -1`
-
-cd ../release
-# create libs dir
-mkdir -p libs
-
-# initialize all libs
-# cp ../libs/setup.sh libs/setup.sh
-# ./libs/setup.sh
-
-# just copy all libs
-cp -r ../libs ./
-
 # compile memgraph
 cd ../build
-# rm -rf ./*
-# cmake -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_BUILD_TYPE:String=debug ..
-# make -j 4
+rm -rf ./*
+cmake -DCMAKE_BUILD_TYPE:String=debug ..
+make -j8
+make copy_hardcoded_queries
+
+# get the most recent version of memgraph exe
+exe_name=`ls -t memgraph_* | head -1`
 
 # create dst directory
 mkdir -p ../release/${exe_name}
@@ -39,13 +26,10 @@ rm -rf ../release/${exe_name}/include
 cp -r include ../release/${exe_name}/include
 cp -r template ../release/${exe_name}/template
 cp -r ../config ../release/${exe_name}/config
+cp -r ../libs ../release/${exe_name}/libs
 
-# create compiled folder and copy hard coded queries
-mkdir -p ../release/${exe_name}/compiled/cpu/hardcode
-rm -rf ../release/${exe_name}/compiled/cpu/hardcode/*
-cp ../tests/integration/hardcoded_query/*.cpp ../release/${exe_name}/compiled/cpu/hardcode
-cp ../tests/integration/hardcoded_query/*.hpp ../release/${exe_name}/compiled/cpu/hardcode
+# copy the hardcoded query plan
+# TODO: minimise the header files
+cp -r compiled ../release/${exe_name}/
 
 echo "Memgraph Release Building DONE"
-
-
diff --git a/src/communication/bolt/v1/states/executor.cpp b/src/communication/bolt/v1/states/executor.cpp
index b5cbc028b..f9becb813 100644
--- a/src/communication/bolt/v1/states/executor.cpp
+++ b/src/communication/bolt/v1/states/executor.cpp
@@ -1,6 +1,7 @@
 #include "communication/bolt/v1/states/executor.hpp"
 #include "communication/bolt/v1/messaging/codes.hpp"
 #include "database/graph_db_accessor.hpp"
+#include "query/frontend/opencypher/parser.hpp"
 
 #ifdef BARRIER
 #include "barrier/barrier.cpp"
@@ -24,17 +25,35 @@ State *Executor::run(Session &session) {
 
     q.statement = session.decoder.read_string();
 
+    // TODO: refactor bolt exception handling (Ferencevic)
     try {
       return this->run(session, q);
-      // TODO: RETURN success MAYBE
+    } catch (const frontend::opencypher::SyntaxException &e) {
+      session.output_stream.write_failure(
+          {{"code", "Memgraph.SyntaxException"}, {"message", "Syntax error"}});
+      session.output_stream.send();
+      return session.bolt.states.error.get();
+    } catch (const backend::cpp::GeneratorException &e) {
+      session.output_stream.write_failure(
+          {{"code", "Memgraph.GeneratorException"},
+           {"message", "Unsupported query"}});
+      session.output_stream.send();
+      return session.bolt.states.error.get();
     } catch (const QueryEngineException &e) {
       session.output_stream.write_failure(
-          {{"code", "Memgraph.QueryEngineException"}, {"message", e.what()}});
+          {{"code", "Memgraph.QueryEngineException"},
+           {"message", "Query engine was unable to execute the query"}});
+      session.output_stream.send();
+      return session.bolt.states.error.get();
+    } catch (const StacktraceException &e) {
+      session.output_stream.write_failure(
+          {{"code", "Memgraph.StacktraceException"},
+           {"message", "Unknown exception"}});
       session.output_stream.send();
       return session.bolt.states.error.get();
     } catch (std::exception &e) {
       session.output_stream.write_failure(
-          {{"code", "Memgraph.Exception"}, {"message", e.what()}});
+          {{"code", "Memgraph.Exception"}, {"message", "unknown exception"}});
       session.output_stream.send();
       return session.bolt.states.error.get();
     }
diff --git a/src/query/backend/cpp/generator.hpp b/src/query/backend/cpp/generator.hpp
index 0175d2aa6..d823cf4ac 100644
--- a/src/query/backend/cpp/generator.hpp
+++ b/src/query/backend/cpp/generator.hpp
@@ -12,6 +12,12 @@ namespace cpp {
 
 using namespace antlr4;
 
+class GeneratorException : public BasicException {
+ public:
+  using BasicException::BasicException;
+  GeneratorException() : BasicException("") {}
+};
+
 /**
  * Traverse Antlr tree::ParseTree generated from Cypher grammar and generate
  * C++.
@@ -23,9 +29,9 @@ class Generator {
    */
   Generator(tree::ParseTree *tree, const std::string &query,
             const uint64_t stripped_hash, const fs::path &path) {
+    throw GeneratorException("unsupported query");
     CypherMainVisitor visitor;
     visitor.visit(tree);
-    throw std::runtime_error("TODO: implementation");
   }
 };
 }
diff --git a/src/query/engine.hpp b/src/query/engine.hpp
index cedb49884..1f1fba69a 100644
--- a/src/query/engine.hpp
+++ b/src/query/engine.hpp
@@ -64,24 +64,15 @@ class QueryEngine : public Loggable {
    */
   auto Run(const std::string &query, GraphDbAccessor &db_accessor,
            Stream &stream) {
-    try {
-      auto preprocessed = preprocessor.preprocess(query);
-      auto plan = LoadCypher(preprocessed);
-      auto result = plan->run(db_accessor, preprocessed.arguments, stream);
-      if (UNLIKELY(!result)) {
-        // info because it might be something like deadlock in which
-        // case one thread is stopped and user has try again
-        logger.info("Unable to execute query (execution returned false)");
-      }
-      return result;
-    } catch (QueryEngineException &e) {
-      logger.error("QueryEngineException: {}", std::string(e.what()));
-      throw e;
-    } catch (std::exception &e) {
-      throw StacktraceException(e.what());
-    } catch (...) {
-      throw StacktraceException("unknown query engine exception");
+    auto preprocessed = preprocessor.preprocess(query);
+    auto plan = LoadCypher(preprocessed);
+    auto result = plan->run(db_accessor, preprocessed.arguments, stream);
+    if (UNLIKELY(!result)) {
+      // info because it might be something like deadlock in which
+      // case one thread is stopped and user has try again
+      logger.info("Unable to execute query (execution returned false)");
     }
+    return result;
   }
 
   /**
diff --git a/src/query/frontend/opencypher/parser.hpp b/src/query/frontend/opencypher/parser.hpp
index cee963d78..5106c5ec7 100644
--- a/src/query/frontend/opencypher/parser.hpp
+++ b/src/query/frontend/opencypher/parser.hpp
@@ -13,7 +13,7 @@ namespace opencypher {
 using namespace antlropencypher;
 using namespace antlr4;
 
-class SyntaxException : BasicException {
+class SyntaxException : public BasicException {
  public:
   SyntaxException() : BasicException("") {}
 };
diff --git a/src/query/plan_compiler.hpp b/src/query/plan_compiler.hpp
index 42460d6fa..e6b5b175c 100644
--- a/src/query/plan_compiler.hpp
+++ b/src/query/plan_compiler.hpp
@@ -48,6 +48,9 @@ class PlanCompiler : public Loggable {
 #ifdef LOG_NO_ERROR
     flags += " -DLOG_NO_ERROR";
 #endif
+#ifdef DEBUG_ASSERT_ON
+    flags += " -DDEBUG_ASSERT_ON";
+#endif
 
     // TODO: load from config (generate compile command)
     // generate compile command
@@ -60,9 +63,8 @@ class PlanCompiler : public Loggable {
         "-std=c++1y",    // compile flags
         in_file,         // input file
         "-o", out_file,  // ouput file
-        "-I./include",   // include paths
-        "-I../include", "-I../../include", "-I../../../include",
-        "-I../libs/fmt", "-I../../libs/fmt", "-I../../../libs/fmt",
+        "-I./include", "-I../include", "-I../../include", "-I../../../include",
+        "-I./libs/fmt", "-I../libs/fmt", "-I../../libs/fmt", "-I../../../libs/fmt",
         "-L./ -L../ -L../../", "-lmemgraph_pic",
         "-shared -fPIC"  // shared library flags
         );
diff --git a/src/query/util.hpp b/src/query/util.hpp
index 347f35b18..191bf07ff 100644
--- a/src/query/util.hpp
+++ b/src/query/util.hpp
@@ -40,8 +40,8 @@ std::string extract_query(const fs::path &path) {
     auto query = utils::trim(line.substr(pos + query_mark.size()));
     while (i + 1 < (int)lines.size() &&
            lines[i + 1].find(comment_mark) != std::string::npos) {
-      query += utils::trim(lines[i + 1].substr(lines[i + 1].find(comment_mark) +
-                                               comment_mark.length()));
+      query += lines[i + 1].substr(lines[i + 1].find(comment_mark) +
+                                   comment_mark.length());
       ++i;
     }
     return query;
diff --git a/tests/integration/hardcoded_query/clique.hpp b/tests/integration/hardcoded_query/clique.hpp
index 6dec7455d..a5c8d8314 100644
--- a/tests/integration/hardcoded_query/clique.hpp
+++ b/tests/integration/hardcoded_query/clique.hpp
@@ -65,7 +65,7 @@ class Bitset {
    * @return intersection.
    */
   Bitset<TStore> Intersect(const Bitset<TStore> &other) {
-    debug_assert(this->blocks_.size() == other.size(),
+    debug_assert(this->blocks_.size() == other.blocks_.size(),
                  "Bitsets are not of equal size.");
     Bitset<TStore> ret(this->blocks_.size() * this->block_size_);
     for (int i = 0; i < (int)this->blocks_.size(); ++i) {
@@ -117,8 +117,12 @@ enum CliqueQuery { SCORE_AND_LIMIT, FIND_ALL };
 bool run_general_query(GraphDbAccessor &db_accessor,
                        const TypedValueStore<> &args, Stream &stream,
                        enum CliqueQuery query_type) {
-  stream.write_fields(
-      {"a.garment_id", "b.garment_id", "c.garment_id", "d.garment_id"});
+  if (query_type == CliqueQuery::FIND_ALL)
+    stream.write_fields(
+        {"a.garment_id", "b.garment_id", "c.garment_id", "d.garment_id"});
+  else
+    stream.write_fields({"a.garment_id", "b.garment_id", "c.garment_id",
+                         "d.garment_id", "score"});
 
   std::vector<VertexAccessor> vertices = db_accessor.vertices();
   std::vector<EdgeAccessor> edges = db_accessor.edges();
@@ -292,7 +296,6 @@ bool run_general_query(GraphDbAccessor &db_accessor,
     if (query_type == CliqueQuery::SCORE_AND_LIMIT)
       stream.write(calc_score(results[i]));
   }
-  stream.write_empty_fields();
   stream.write_meta("r");
   db_accessor.transaction_.commit();
   return true;
diff --git a/tests/integration/hardcoded_query/delete_all.cpp b/tests/integration/hardcoded_query/delete_all.cpp
index f0303654f..5af43f6f6 100644
--- a/tests/integration/hardcoded_query/delete_all.cpp
+++ b/tests/integration/hardcoded_query/delete_all.cpp
@@ -14,6 +14,8 @@ class CPUPlan : public PlanInterface<Stream> {
   bool run(GraphDbAccessor &db_accessor, const TypedValueStore<> &args,
            Stream &stream) {
     for (auto v : db_accessor.vertices()) db_accessor.detach_remove_vertex(v);
+    stream.write_empty_fields();
+    stream.write_meta("rw");
     db_accessor.transaction_.commit();
     return true;
   }
diff --git a/tests/integration/hardcoded_query/match_garment_set_label_return_labels.cpp b/tests/integration/hardcoded_query/match_garment_set_label_return_labels.cpp
deleted file mode 100644
index fff7b3a59..000000000
--- a/tests/integration/hardcoded_query/match_garment_set_label_return_labels.cpp
+++ /dev/null
@@ -1,46 +0,0 @@
-#include <iostream>
-#include <string>
-
-#include "query/plan_interface.hpp"
-#include "storage/edge_accessor.hpp"
-#include "storage/vertex_accessor.hpp"
-#include "using.hpp"
-
-using std::cout;
-using std::endl;
-
-// Query: MATCH (g:garment {garment_id: 1234}) SET g:FF RETURN labels(g)
-
-class CPUPlan : public PlanInterface<Stream> {
- public:
-  bool run(GraphDbAccessor &db_accessor, const TypedValueStore<> &args,
-           Stream &stream) {
-    stream.write_field("labels(g)");
-    for (auto vertex : db_accessor.vertices()) {
-      if (vertex.has_label(db_accessor.label("garment"))) {
-        auto prop = vertex.PropsAt(db_accessor.property("garment_id"));
-        if (prop.type_ == TypedValue::Type::Null) continue;
-        auto cmp = prop == args.at(0);
-        if (cmp.type_ != TypedValue::Type::Bool) continue;
-        if (cmp.Value<bool>() != true) continue;
-        vertex.add_label(db_accessor.label("FF"));
-        auto &labels = vertex.labels();
-        stream.write_record();
-        stream.write_list_header(1);
-        stream.write_list_header(labels.size());
-        for (const GraphDb::Label &label : labels) {
-          stream.write(label);
-        }
-        stream.chunk();
-      }
-    }
-    stream.write_meta("rw");
-    return db_accessor.transaction_.commit();
-  }
-
-  ~CPUPlan() {}
-};
-
-extern "C" PlanInterface<Stream> *produce() { return new CPUPlan(); }
-
-extern "C" void destruct(PlanInterface<Stream> *p) { delete p; }