Add query plan preloading, the Dressipi A* hardcoded query, and a PrintRecordStream test harness (work in progress)
commit 7b3c4c270e (parent 58b9c45ff2)
@@ -132,7 +132,6 @@ FILE(COPY ${include_dir}/utils/counters/atomic_counter.hpp DESTINATION ${build_include_dir}/utils/counters)
FILE(COPY ${include_dir}/utils/counters/simple_counter.hpp DESTINATION ${build_include_dir}/utils/counters)
FILE(COPY ${include_dir}/utils/random/fast_binomial.hpp DESTINATION ${build_include_dir}/utils/random)
FILE(COPY ${include_dir}/utils/random/xorshift128plus.hpp DESTINATION ${build_include_dir}/utils/random)
FILE(COPY ${include_dir}/utils/exceptions/basic_exception.hpp DESTINATION ${build_include_dir}/utils/exceptions)
FILE(COPY ${include_dir}/utils/datetime/timestamp.hpp DESTINATION ${build_include_dir}/utils/datetime)
FILE(COPY ${include_dir}/utils/datetime/datetime_error.hpp DESTINATION ${build_include_dir}/utils/datetime)
FILE(COPY ${include_dir}/utils/types/byte.hpp DESTINATION ${build_include_dir}/utils/types)
@@ -141,6 +140,10 @@ FILE(COPY ${include_dir}/utils/option.hpp DESTINATION ${build_include_dir}/utils)
FILE(COPY ${include_dir}/utils/border.hpp DESTINATION ${build_include_dir}/utils)
FILE(COPY ${include_dir}/utils/order.hpp DESTINATION ${build_include_dir}/utils)
FILE(COPY ${include_dir}/utils/numerics/saturate.hpp DESTINATION ${build_include_dir}/utils/numerics)
FILE(COPY ${include_dir}/utils/memory/stack_allocator.hpp DESTINATION ${build_include_dir}/utils/memory)
FILE(COPY ${include_dir}/utils/memory/block_allocator.hpp DESTINATION ${build_include_dir}/utils/memory)
FILE(COPY ${include_dir}/utils/exceptions/basic_exception.hpp DESTINATION ${build_include_dir}/utils/exceptions)
FILE(COPY ${include_dir}/utils/exceptions/out_of_memory.hpp DESTINATION ${build_include_dir}/utils/exceptions)

FILE(COPY ${include_dir}/utils/iterator/iterator_base.hpp DESTINATION ${build_include_dir}/utils/iterator)
FILE(COPY ${include_dir}/utils/iterator/virtual_iter.hpp DESTINATION ${build_include_dir}/utils/iterator)
@@ -1,5 +1,7 @@
#pragma once

#include <experimental/filesystem>

#include "database/db.hpp"
#include "logging/default.hpp"
#include "query/exception/query_engine.hpp"
@@ -13,6 +15,8 @@
 * -> [code_compiler] -> code_executor
 */

namespace fs = std::experimental::filesystem;

// query engine has to be aware of the Stream because Stream
// is passed to the dynamic shared library
template <typename Stream>
@@ -44,6 +48,17 @@ public:
        }
    }

    // preload functionality
    auto load(const uint64_t hash, const fs::path& path)
    {
        program_loader.load(hash, path);
    }

    auto load(const std::string& query)
    {
        program_loader.load(query);
    }
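
Both load overloads above simply forward to the ProgramLoader member. A
minimal usage sketch (the engine instance name, hash value, and path are
illustrative, not part of this commit):

    // Hypothetical caller; assumes the enclosing engine class has been
    // instantiated with Stream = std::ostream as "engine".
    engine.load(11856262817829095719ull,
                fs::path("compiled/cpu/hardcode/11856262817829095719.cpp"));
    // Or hand over raw query text and let the loader preprocess it.
    engine.load("MATCH (g:garment {garment_id: 1234}) RETURN g");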

protected:
    Logger logger;
@@ -20,9 +20,6 @@ public:
    std::string flags;

    // TODO: sync this with cmake configuration
-#ifdef BARRIER
-    flags += " -DBARRIER";
-#endif
#ifdef NDEBUG
    flags += " -DNDEBUG -O2";
#endif
@@ -53,9 +50,6 @@ public:
        "-I../include",
        "-I../libs/fmt", // TODO: load from config
        "-I../../libs/fmt", "-L./ -L../",
-#ifdef BARRIER
-        "-lbarrier_pic",
-#endif
        "-lmemgraph_pic",
        "-shared -fPIC" // shared library flags
    );
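
For orientation, the fragments above are joined into one compiler command
line. A rough sketch of how they might assemble (the compiler name and the
helper function are assumptions, not code from this commit):

    #include <string>

    // Hypothetical helper mirroring the concatenation of the listed flags.
    std::string compile_command(const std::string &flags,
                                const std::string &in_file,
                                const std::string &out_file)
    {
        return "clang++ " + flags + " " + in_file + " -o " + out_file +
               " -I../include -I../libs/fmt -I../../libs/fmt -L./ -L../" +
               " -lmemgraph_pic -shared -fPIC";
    }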
@@ -67,6 +61,8 @@ public:

    // if compilation failed, throw an exception
    if (compile_status == -1) {
        logger.debug("FAIL: Query Code Compilation: {} -> {}", in_file,
                     out_file);
        throw PlanCompilationException(
            "Code compilation error. Generated code is not compilable or "
            "compilation settings are wrong");
@@ -3,6 +3,7 @@
#include <memory>
#include <string>
#include <unordered_map>
#include <experimental/filesystem>

#include "config/config.hpp"
#include "logging/default.hpp"
@@ -16,6 +17,8 @@
#include "utils/file.hpp"
#include "utils/hashing/fnv.hpp"

namespace fs = std::experimental::filesystem;

template <typename Stream>
class ProgramLoader
{
@@ -26,6 +29,16 @@ public:

    ProgramLoader() : logger(logging::log->logger("PlanLoader")) {}

    // TODO: decouple the load(query) method

    auto load(const uint64_t hash, const fs::path &path)
    {
        // TODO: get the lib path (the same folder as path, or from config)
        // TODO: compile
        // TODO: dispose of the old lib
        // TODO: store the compiled lib
    }

    auto load(const std::string &query)
    {
        auto preprocessed = preprocessor.preprocess(query);
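
The hash overload is still a stub; the text overload presumably continues by
hashing the preprocessed query and checking a plan cache. A sketch of that
step, where fnv() from "utils/hashing/fnv.hpp" and the cache map are both
assumptions:

    // Hypothetical continuation of load(query); all names are assumptions.
    auto hash = fnv(preprocessed.query);  // 64-bit FNV of the stripped text
    auto it = programs.find(hash);        // cache: hash -> compiled plan
    if (it != programs.end())
        return it->second;                // reuse the already compiled plan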
@@ -8,8 +8,6 @@
auto VertexAccessor::out() const
{
    DbTransaction &t = this->db;
-   std::cout << "VA OUT" << std::endl;
-   std::cout << record->data.out.size() << std::endl;
    return iter::make_map(iter::make_iter_ref(record->data.out),
                          [&](auto e) -> auto { return EdgeAccessor(*e, t); });
}
@@ -6,4 +6,6 @@ class NotYetImplemented : public BasicException
{
public:
    using BasicException::BasicException;

    NotYetImplemented() : BasicException("") {}
};
@@ -14,9 +14,13 @@ using std::endl;

// Dressipi astar query of 4 clicks.

// TODO: push down where appropriate
using Stream = std::ostream;

// TODO: figure out from the pattern in a query
constexpr size_t max_depth = 3;

// TODO: from query LIMIT 10
constexpr size_t limit = 10;

class Node
@@ -79,10 +83,12 @@ void astar(DbAccessor &t, plan_args_t &args, Stream &stream)
    auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; };
    std::priority_queue<Node *, std::vector<Node *>, decltype(cmp)> queue(cmp);

    // TODO: make this independent of internal ids
    auto start_vr = t.vertex_find(Id(args[0].as<Int64>().value()));
    if (!start_vr.is_present())
    {
        // stream.write_failure({{}});
        // TODO: stream the failure

        return;
    }

@@ -96,15 +102,19 @@ void astar(DbAccessor &t, plan_args_t &args, Stream &stream)
        auto now = queue.top();
        queue.pop();

-       if (max_depth <= now->depth)
+       if (now->depth >= max_depth)
        {
            // stream.write_success_empty();
            // best.push_back(now);
            // TODO: stream the result

            count++;

            if (count >= limit)
            {
                // the limit was reached -> STOP the execution
                break;
            }

            // if the limit wasn't reached -> POP the next vertex
            continue;
        }

@@ -130,6 +140,8 @@ public:
    {
        DbAccessor t(db);

        // TODO: find the node

        astar(t, args, stream);

        return t.commit();
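
A note on the comparator above: std::priority_queue is a max-heap by default,
so ordering nodes with left->cost > right->cost turns it into a min-heap, i.e.
top() always yields the cheapest candidate path. A standalone illustration
(not from this commit):

    #include <functional>
    #include <queue>
    #include <vector>

    int main()
    {
        // With operator> as the comparator, top() is the smallest element.
        std::priority_queue<int, std::vector<int>, std::greater<int>> q;
        q.push(3); q.push(1); q.push(2);
        return q.top(); // 1
    }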
@@ -2,6 +2,8 @@

# Initial version of the script that is going to be used for the release build.

# NOTE: do not run this script as a super user

# TODO: enable options related to lib

echo "Memgraph Release Building..."
@@ -33,12 +35,14 @@ mkdir -p ../release/${exe_name}
# copy all relevant files
cp ${exe_name} ../release/${exe_name}/memgraph
cp libmemgraph_pic.a ../release/${exe_name}/libmemgraph_pic.a
rm -rf ../release/${exe_name}/include
cp -r include ../release/${exe_name}/include
cp -r template ../release/${exe_name}/template
cp -r ../config ../release/${exe_name}/config

# create the compiled folder and copy the hardcoded queries
mkdir -p ../release/${exe_name}/compiled/cpu/hardcode
rm -rf ../release/${exe_name}/compiled/cpu/hardcode/*
cp ../tests/integration/hardcoded_query/*.cpp ../release/${exe_name}/compiled/cpu/hardcode
cp ../tests/integration/hardcoded_query/*.hpp ../release/${exe_name}/compiled/cpu/hardcode
tests/data/queries/core/dressipi_graph.txt (new file, 70 lines)
@@ -0,0 +1,70 @@
CREATE (g:garment {garment_id: 1234, garment_category_id: 1, conceals: 30}) RETURN g
MATCH (g:garment {garment_id: 1234}) SET g:AA RETURN g
MATCH (g:garment {garment_id: 1234}) SET g:BB RETURN g
MATCH (g:garment {garment_id: 1234}) SET g:EE RETURN g
CREATE (g:garment {garment_id: 2345, garment_category_id: 6, reveals: 10}) RETURN g
MATCH (g:garment {garment_id: 2345}) SET g:CC RETURN g
MATCH (g:garment {garment_id: 2345}) SET g:DD RETURN g
CREATE (g:garment {garment_id: 3456, garment_category_id: 8}) RETURN g
MATCH (g:garment {garment_id: 3456}) SET g:CC RETURN g
MATCH (g:garment {garment_id: 3456}) SET g:DD RETURN g
CREATE (g:garment {garment_id: 4567, garment_category_id: 15}) RETURN g
MATCH (g:garment {garment_id: 4567}) SET g:AA RETURN g
MATCH (g:garment {garment_id: 4567}) SET g:BB RETURN g
MATCH (g:garment {garment_id: 4567}) SET g:DD RETURN g
CREATE (g:garment {garment_id: 5678, garment_category_id: 19}) RETURN g
MATCH (g:garment {garment_id: 5678}) SET g:BB RETURN g
MATCH (g:garment {garment_id: 5678}) SET g:CC RETURN g
MATCH (g:garment {garment_id: 5678}) SET g:EE RETURN g
CREATE (g:garment {garment_id: 6789, garment_category_id: 3}) RETURN g
MATCH (g:garment {garment_id: 6789}) SET g:AA RETURN g
MATCH (g:garment {garment_id: 6789}) SET g:DD RETURN g
MATCH (g:garment {garment_id: 6789}) SET g:EE RETURN g
CREATE (g:garment {garment_id: 7890, garment_category_id: 25}) RETURN g
MATCH (g:garment {garment_id: 7890}) SET g:AA RETURN g
MATCH (g:garment {garment_id: 7890}) SET g:BB RETURN g
MATCH (g:garment {garment_id: 7890}) SET g:CC RETURN g
MATCH (g:garment {garment_id: 7890}) SET g:EE RETURN g

MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 1234}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 4567}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 6789}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 5678}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 3456}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 2345}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 5678}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 6789}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 7890}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r
MATCH (g1:garment {garment_id: 3456}), (g2:garment {garment_id: 4567}) CREATE (g1)-[r:default_outfit]->(g2) RETURN r

CREATE (p:profile {profile_id: 111, partner_id: 55, reveals: 30}) RETURN p
CREATE (p:profile {profile_id: 112, partner_id: 55}) RETURN p
CREATE (p:profile {profile_id: 112, partner_id: 77, conceals: 10}) RETURN p

MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 2345}) CREATE (p)-[s:score]->(g) SET s.score=1200 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 3456}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 4567}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 111, partner_id: 55}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1800 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=2000 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 4567}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 5678}) CREATE (p)-[s:score]->(g) SET s.score=1000 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1600 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 55}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1900 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 1234}) CREATE (p)-[s:score]->(g) SET s.score=1500 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 2345}) CREATE (p)-[s:score]->(g) SET s.score=1300 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 3456}) CREATE (p)-[s:score]->(g) SET s.score=1300 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 5678}) CREATE (p)-[s:score]->(g) SET s.score=1200 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 6789}) CREATE (p)-[s:score]->(g) SET s.score=1700 RETURN s
MATCH (p:profile {profile_id: 112, partner_id: 77}), (g:garment {garment_id: 7890}) CREATE (p)-[s:score]->(g) SET s.score=1900 RETURN s

MATCH (a:garment)-[:default_outfit]-(b:garment)-[:default_outfit]-(c:garment)-[:default_outfit]-(d:garment)-[:default_outfit]-(a:garment)-[:default_outfit]-(c:garment), (b:garment)-[:default_outfit]-(d:garment) WHERE a.garment_id = 1234 RETURN a.garment_id, b.garment_id, c.garment_id, d.garment_id ORDER BY (a.score + b.score + c.score + d.score) DESC LIMIT 10
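The final MATCH above is the four-garment outfit query whose hash, 11856262817829095719, names the hardcoded plan added later in this commit.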
@@ -1,2 +0,0 @@
-MERGE (g1:garment {garment_id: 1234})-[r:default_outfit]-(g2:garment {garment_id: 2345}) RETURN r
-MATCH (p:profile {profile_id: 111, partner_id: 55})-[s:score]-(g:garment {garment_id: 1234}) DELETE s
@@ -633,6 +633,10 @@ auto load_dressipi_functions(Db &db)
        return t.commit();
    };

    // Query: MATCH (a:garment)-[:default_outfit]-(b:garment)-[:default_outfit]-(c:garment)-[:default_outfit]-(d:garment)-[:default_outfit]-(a:garment)-[:default_outfit]-(c:garment), (b:garment)-[:default_outfit]-(d:garment) WHERE a.garment_id = 1234 RETURN a.garment_id, b.garment_id, c.garment_id, d.garment_id ORDER BY (a.score + b.score + c.score + d.score) DESC LIMIT 10
    // Hash: 11856262817829095719
    // TODO: automate

    return functions;
}
}
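
The hash in the comment matches the filename of the hardcoded plan added
below, so the mapping from query text to plan file is presumably
hash-of-stripped-query -> <hash>.cpp. A sketch under that assumption (fnv's
exact signature is not shown in this diff):

    #include <cstdint>
    #include <string>

    // Hypothetical: derive the plan filename from the stripped query text,
    // using fnv() from "utils/hashing/fnv.hpp" (assumed to return uint64_t).
    uint64_t plan_hash = fnv(stripped_query);
    std::string plan_file = std::to_string(plan_hash) + ".cpp";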
tests/integration/hardcoded_query/11856262817829095719.cpp (new file, 188 lines)
@@ -0,0 +1,188 @@
#include <algorithm> // for std::sort used below (not in the original diff)
#include <iostream>
#include <queue>
#include <string>
#include <vector>

#include "query/i_plan_cpu.hpp"
#include "query/util.hpp"
#include "storage/edge_x_vertex.hpp"
#include "storage/model/properties/all.hpp"
#include "storage/vertex_accessor.hpp"
#include "using.hpp"
#include "utils/memory/stack_allocator.hpp"

using std::cout;
using std::endl;

// Dressipi astar query of 4 clicks.

// TODO: figure out from the pattern in a query
constexpr size_t max_depth = 3;

// TODO: from query LIMIT 10
constexpr size_t limit = 10;

class Node
{
public:
    Node *parent = {nullptr};
    VertexPropertyType<Float> tkey;
    double cost;
    int depth = {0};
    double sum = {0.0};
    VertexAccessor vacc;

    Node(VertexAccessor vacc, double cost,
         VertexPropertyType<Float> const &tkey)
        : cost(cost), vacc(vacc), tkey(tkey)
    {
    }
    Node(VertexAccessor vacc, double cost, Node *parent,
         VertexPropertyType<Float> const &tkey)
        : cost(cost), vacc(vacc), parent(parent), depth(parent->depth + 1),
          tkey(tkey)
    {
    }

    // accumulate the score property along the whole parent chain and cache it
    double sum_vertex_score()
    {
        auto now = this;
        double sum = 0;
        do
        {
            sum += (now->vacc.at(tkey).get())->value();
            now = now->parent;
        } while (now != nullptr);
        this->sum = sum;
        return sum;
    }
};

// returns true when vertex v (a neighbour of the path head) also has an
// incoming edge from every earlier vertex on the path; the outfit pattern in
// the query requires all four garments to be pairwise connected
bool vertex_filter_contained(DbAccessor &t, VertexAccessor &v, Node *before)
{
    if (v.fill())
    {
        bool found;
        do
        {
            found = false;
            before = before->parent;
            if (before == nullptr)
            {
                return true;
            }
        } while (v.in_contains(before->vacc));
    }
    return false;
}

template <typename Stream>
auto astar(VertexAccessor &va, DbAccessor &t, plan_args_t &, Stream &)
{
    StackAllocator stack;
    std::vector<Node *> results;

    // TODO: variable part (extract)
    VertexPropertyType<Float> tkey = t.vertex_property_key<Float>("score");

    auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; };
    std::priority_queue<Node *, std::vector<Node *>, decltype(cmp)> queue(cmp);

    Node *start = new (stack.allocate<Node>()) Node(va, 0, tkey);
    queue.push(start);

    size_t count = 0;
    do
    {
        auto now = queue.top();
        queue.pop();

        if (now->depth >= max_depth)
        {
            results.emplace_back(now);

            count++;

            if (count >= limit)
            {
                // the limit was reached -> STOP the execution
                break;
            }

            // if the limit wasn't reached -> POP the next vertex
            continue;
        }

        iter::for_all(now->vacc.out(), [&](auto edge) {
            VertexAccessor va = edge.to();
            if (vertex_filter_contained(t, va, now))
            {
                auto cost = 1 - va.at(tkey).get()->value();
                Node *n = new (stack.allocate<Node>())
                    Node(va, now->cost + cost, now, tkey);
                queue.push(n);
            }
        });
    } while (!queue.empty());

    stack.free();

    return results;
}

class PlanCPU : public IPlanCPU<Stream>
{
public:
    bool run(Db &db, plan_args_t &args, Stream &stream) override
    {
        DbAccessor t(db);

        indices_t indices = {{"garment_id", 0}};
        auto properties = query_properties(indices, args);

        auto &label = t.label_find_or_create("garment");
        auto garment_id_prop_key =
            t.vertex_property_key("garment_id", args[0].key.flags());

        stream.write_fields(
            {{"a.garment_id", "b.garment_id", "c.garment_id", "d.garment_id"}});

        label.index()
            .for_range(t)
            .properties_filter(t, properties)
            .for_all([&](auto va) {
                auto results = astar(va, t, args, stream);
                for (auto node : results)
                {
                    node->sum_vertex_score();
                }
                std::sort(results.begin(), results.end(),
                          [](Node *a, Node *b) { return a->sum < b->sum; });
                for (auto node : results)
                {
                    stream.write_record();
                    stream.write_list_header(max_depth + 1);
                    auto current_node = node;
                    do
                    {
                        // TODO: get the property, but in reverse order
                        stream.write(current_node->vacc.at(garment_id_prop_key)
                                         .template as<Float>());
                        current_node = current_node->parent;
                    } while (current_node != nullptr);
                }
            });

        stream.write_empty_fields();
        stream.write_meta("r");

        return t.commit();
    }

    ~PlanCPU() {}
};

extern "C" IPlanCPU<Stream> *produce() { return new PlanCPU(); }

extern "C" void destruct(IPlanCPU<Stream> *p) { delete p; }
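
The produce/destruct pair is the C interface the engine can resolve after
compiling this file into a shared object. A minimal sketch of the consuming
side, assuming POSIX dlopen (the loader in this commit is still a stub, so
this is illustrative):

    #include <dlfcn.h>

    using produce_t = IPlanCPU<Stream> *(*)();
    using destruct_t = void (*)(IPlanCPU<Stream> *);

    void *lib = dlopen("./11856262817829095719.so", RTLD_NOW);
    auto produce_fn = reinterpret_cast<produce_t>(dlsym(lib, "produce"));
    auto destruct_fn = reinterpret_cast<destruct_t>(dlsym(lib, "destruct"));

    IPlanCPU<Stream> *plan = produce_fn();
    plan->run(db, args, stream); // db, args, stream provided by the engine
    destruct_fn(plan);
    dlclose(lib);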
@@ -8,6 +8,7 @@
#include "utils/string/file.hpp"
#include "utils/variadic/variadic.hpp"
#include "utils/command_line/arguments.hpp"
#include "stream/print_record_stream.hpp"

Logger logger;

@@ -15,10 +16,14 @@ int main(int argc, char *argv[])
{
    auto arguments = all_arguments(argc, argv);

    PrintRecordStream stream(std::cout);

    // POSSIBILITIES: basic, dressipi
    auto suite_name = get_argument(arguments, "-s", "basic");
    // POSSIBILITIES: query_execution, hash_generation
    auto work_mode = get_argument(arguments, "-w", "query_execution");
    // POSSIBILITIES: mg_basic.txt, dressipi_basic.txt, dressipi_graph.txt
    auto query_set_filename = get_argument(arguments, "-q", "mg_basic.txt");

    // init logging
    logging::init_sync();
@@ -39,7 +44,7 @@ int main(int argc, char *argv[])
    auto stripper = make_query_stripper(TK_LONG, TK_FLOAT, TK_STR, TK_BOOL);

    // load queries
-   std::string file_path = "data/queries/core/" + suite_name + ".txt";
+   std::string file_path = "data/queries/core/" + query_set_filename;
    auto queries = utils::read_lines(file_path.c_str());

    // execute all queries
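
Given the flags above, a run of this harness against the new query set might
look like ./queries -s dressipi -w query_execution -q dressipi_graph.txt (the
binary name here is illustrative).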
tests/integration/stream/print_record_stream.hpp (new file, 140 lines)
@@ -0,0 +1,140 @@
#pragma once

#include <map>
#include <ostream> // for std::ostream (not in the original diff)
#include <string>
#include <vector>

#include "utils/exceptions/not_yet_implemented.hpp"

// NOTE: VertexAccessor, EdgeAccessor and the property types used below are
// expected to be provided by headers included before this one; the original
// diff declares no includes for them.
class PrintRecordStream
{
private:
    std::ostream &stream;

public:
    PrintRecordStream(std::ostream &stream) : stream(stream) {}

    void write_success()
    {
        stream << "SUCCESS\n";
    }

    void write_success_empty()
    {
        stream << "SUCCESS EMPTY\n";
    }

    void write_ignored()
    {
        stream << "IGNORED\n";
    }

    void write_empty_fields()
    {
        stream << "EMPTY FIELDS\n";
    }

    void write_fields(const std::vector<std::string> &fields)
    {
        stream << "FIELDS:";
        for (auto &field : fields)
        {
            stream << " " << field;
        }
        stream << '\n';
    }

    void write_field(const std::string &field)
    {
        stream << "Field: " << field << '\n';
    }

    void write_list_header(size_t size)
    {
        stream << "List: " << size << '\n';
    }

    void write_record()
    {
        stream << "Record\n";
    }

    void write_meta(const std::string &type)
    {
        stream << "Meta: " << type;
    }

    void write_failure(const std::map<std::string, std::string> &data)
    {
        throw NotYetImplemented();
    }

    void write_count(const size_t count)
    {
        throw NotYetImplemented();
    }

    void write(const VertexAccessor &vertex)
    {
        throw NotYetImplemented();
    }

    void write_vertex_record(const VertexAccessor &va)
    {
        throw NotYetImplemented();
    }

    void write(const EdgeAccessor &edge)
    {
        throw NotYetImplemented();
    }

    void write_edge_record(const EdgeAccessor &ea)
    {
        throw NotYetImplemented();
    }

    void write(const StoredProperty<TypeGroupEdge> &prop)
    {
        // prop.accept(serializer);
        throw NotYetImplemented();
    }

    void write(const StoredProperty<TypeGroupVertex> &prop)
    {
        // prop.accept(serializer);
        throw NotYetImplemented();
    }

    void write(const Null &prop)
    {
        throw NotYetImplemented();
    }

    void write(const Bool &prop)
    {
        throw NotYetImplemented();
    }

    void write(const Float &prop) { throw NotYetImplemented(); }
    void write(const Int32 &prop) { throw NotYetImplemented(); }
    void write(const Int64 &prop) { throw NotYetImplemented(); }
    void write(const Double &prop) { throw NotYetImplemented(); }
    void write(const String &prop) { throw NotYetImplemented(); }
    void write(const ArrayBool &prop) { throw NotYetImplemented(); }
    void write(const ArrayInt32 &prop) { throw NotYetImplemented(); }
    void write(const ArrayInt64 &prop) { throw NotYetImplemented(); }
    void write(const ArrayFloat &prop) { throw NotYetImplemented(); }
    void write(const ArrayDouble &prop) { throw NotYetImplemented(); }
    void write(const ArrayString &prop) { throw NotYetImplemented(); }

    void send()
    {
        throw NotYetImplemented();
    }

    void chunk()
    {
        throw NotYetImplemented();
    }
};
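
A quick usage sketch of the stream against stdout, touching only the methods
that are implemented above (not from this commit):

    #include <iostream>

    PrintRecordStream stream(std::cout);
    stream.write_fields({"a.garment_id", "b.garment_id"});
    stream.write_record();
    stream.write_list_header(4);
    stream.write_meta("r"); // prints "Meta: r" (note: no trailing newline)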