Cleanup of poc/. Only the CMakeLists.txt is left, because it is reasonable to keep a proof-of-concept folder.
Summary: Cleanup of poc/.
Reviewers: dgleich, mislav.bradac
Reviewed By: mislav.bradac
Subscribers: pullbot, buda
Differential Revision: https://phabricator.memgraph.io/D166
This commit is contained in:
parent
f146c458be
commit
66d56820ac
@@ -241,7 +241,7 @@ endif()
option(MEMGRAPH "Build memgraph binary" ON)
message(STATUS "MEMGRAPH binary: ${MEMGRAPH}")
# proof of concept
-option(POC "Build proof of concept binaries" OFF)
+option(POC "Build proof of concept binaries" ON)
message(STATUS "POC binaries: ${POC}")
# tests
option(ALL_TESTS "Add all test binaries" ON)
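The hunk above only flips the default of the POC option from OFF to ON. As a minimal illustration (not part of the diff; it assumes a standard out-of-source CMake build directory), the option can still be set explicitly at configure time:

    # build the proof of concept binaries (the new default)
    cmake -DPOC=ON ..
    # or leave them out and build only the main binary and tests
    cmake -DPOC=OFF ..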
init (2 changed lines)
@@ -3,7 +3,7 @@
echo "START"

# install all dependencies on debian based operating systems
-for pkg in wget git cmake uuid-dev clang-3.8 llvm-3.8 clang-format-3.8 doxygen; do
+for pkg in wget git cmake uuid-dev clang-3.8 llvm-3.8 clang-format-3.8 doxygen libreadline-dev; do
    dpkg -s $pkg 2>/dev/null >/dev/null || sudo apt-get -y install $pkg
done
@@ -15,8 +15,8 @@ foreach(poc_cpp ${poc_cpps})
    # set target name in format {project_name}_{test_type}_{exec_name}
    set(target_name ${project_name}_poc_${exec_name})

-   # build exec file
-   add_executable(${target_name} ${poc_cpp} isolation/header.cpp)
+   # build exe file
+   add_executable(${target_name} ${poc_cpp})
    set_property(TARGET ${target_name} PROPERTY CXX_STANDARD ${cxx_standard})

    # OUTPUT_NAME sets the real name of a target when it is built and can be
@@ -30,8 +30,8 @@ foreach(poc_cpp ${poc_cpps})
    # memgraph lib
    target_link_libraries(${target_name} memgraph_lib)
    # fmt format lib
-   target_link_libraries(${target_name} ${fmt_static_lib})
+   target_link_libraries(${target_name} fmt)
    # yaml parser lib
-   target_link_libraries(${target_name} ${yaml_static_lib})
+   target_link_libraries(${target_name} yaml-cpp)

endforeach()
@@ -1,52 +0,0 @@
#include <cstring>
#include <ctime>

#include "database/graph_db.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "utils/command_line/arguments.hpp"

using namespace std;

// Adds a double property with a random value in [0, max_value] to all vertices.
void add_scores(Db &db, double max_value, std::string const &property_name)
{
    DbAccessor t(db);

    auto key_score = t.vertex_property_family_get(property_name)
                         .get(Flags::Double)
                         .family_key();

    std::srand(time(0));
    t.vertex_access().fill().for_all([&](auto v) {
        double value = ((std::rand() + 0.0) / RAND_MAX) * max_value;
        v.set(StoredProperty<TypeGroupVertex>(Double(value), key_score));
    });

    t.commit();
}

// Tool to add double properties to all vertices.
// Accepts flags for csv import.
// -db name    , will create a database with that name.
// -pn name    , will name the property with that name, default: name=score.
// -max number , will set the range of the property to [0, max], default: max=1
int main(int argc, char **argv)
{
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());

    auto para = all_arguments(argc, argv);

    std::string property_name = get_argument(para, "-pn", "score");
    double max_value = std::stod(get_argument(para, "-max", "1"));

    Db db(get_argument(para, "-db", "default"));

    add_scores(db, max_value, property_name);

    db.snap_engine.make_snapshot();

    return 0;
}
poc/astar.cpp (deleted, 328 lines)
@@ -1,328 +0,0 @@
#include <chrono>
#include <cstring>
#include <ctime>
#include <fstream>
#include <iostream>
#include <queue>
#include <regex>
#include <sstream>
#include <string>
#include <vector>

#include "communication/bolt/v1/serialization/bolt_serializer.hpp"
#include "data_structures/map/rh_hashmap.hpp"
#include "database/graph_db.hpp"
#include "database/db_accessor.cpp"
#include "database/db_accessor.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "storage/edge_x_vertex.hpp"
#include "storage/edges.cpp"
#include "storage/edges.hpp"
#include "storage/indexes/impl/nonunique_unordered_index.cpp"
#include "storage/model/properties/properties.cpp"
#include "storage/record_accessor.cpp"
#include "storage/vertex_accessor.hpp"
#include "storage/vertices.cpp"
#include "storage/vertices.hpp"
#include "utils/command_line/arguments.hpp"

const int max_score = 1000000;

using namespace std;
typedef VertexAccessor VertexAccessor;

void add_scores(Db &db);

class Node
{
public:
    Node *parent = {nullptr};
    type_key_t<TypeGroupVertex, Double> tkey;
    double cost;
    int depth = {0};
    VertexAccessor vacc;

    Node(VertexAccessor vacc, double cost,
         type_key_t<TypeGroupVertex, Double> tkey)
        : cost(cost), vacc(vacc), tkey(tkey)
    {
    }
    Node(VertexAccessor vacc, double cost, Node *parent,
         type_key_t<TypeGroupVertex, Double> tkey)
        : cost(cost), vacc(vacc), parent(parent), depth(parent->depth + 1),
          tkey(tkey)
    {
    }

    double sum_vertex_score()
    {
        auto now = this;
        double sum = 0;
        do
        {
            sum += (now->vacc.at(tkey).get())->value();
            now = now->parent;
        } while (now != nullptr);
        return sum;
    }
};

class Score
{
public:
    Score() : value(std::numeric_limits<double>::max()) {}
    Score(double v) : value(v) {}
    double value;
};

void found_result(Node *res)
{
    double sum = res->sum_vertex_score();

    std::cout << "{score: " << sum << endl;
    auto bef = res;
    while (bef != nullptr)
    {
        std::cout << " ";
        bef->vacc.operator->()->stream_repr(std::cout);
        std::cout << std::endl;
        bef = bef->parent;
    }
}

double calc_heuristic_cost_dummy(type_key_t<TypeGroupVertex, Double> tkey,
                                 EdgeAccessor &edge, VertexAccessor &vertex)
{
    assert(!vertex.empty());
    return 1 - vertex.at(tkey).get()->value();
}

typedef bool (*EdgeFilter)(DbAccessor &t, EdgeAccessor &, Node *before);
typedef bool (*VertexFilter)(DbAccessor &t, VertexAccessor &, Node *before);

bool edge_filter_dummy(DbAccessor &t, EdgeAccessor &e, Node *before)
{
    return true;
}

bool vertex_filter_dummy(DbAccessor &t, VertexAccessor &va, Node *before)
{
    return va.fill();
}

bool vertex_filter_contained_dummy(DbAccessor &t, VertexAccessor &v,
                                   Node *before)
{
    if (v.fill())
    {
        bool found;
        do
        {
            found = false;
            before = before->parent;
            if (before == nullptr)
            {
                return true;
            }
            auto it = before->vacc.out();
            for (auto e = it.next(); e.is_present(); e = it.next())
            {
                VertexAccessor va = e.get().to();
                if (va == v)
                {
                    found = true;
                    break;
                }
            }
        } while (found);
    }
    return false;
}

bool vertex_filter_contained(DbAccessor &t, VertexAccessor &v, Node *before)
{
    if (v.fill())
    {
        bool found;
        do
        {
            found = false;
            before = before->parent;
            if (before == nullptr)
            {
                return true;
            }
        } while (v.in_contains(before->vacc));
    }
    return false;
}

// The vertex filter and the edge filter each have max_depth functions,
// one for every depth.
// Filters return true if the element satisfies the condition.
auto a_star(
    Db &db, int64_t sys_id_start, uint max_depth, EdgeFilter e_filter[],
    VertexFilter v_filter[],
    double (*calc_heuristic_cost)(type_key_t<TypeGroupVertex, Double> tkey,
                                  EdgeAccessor &edge, VertexAccessor &vertex),
    int limit)
{
    // get transaction
    DbAccessor t(db);

    type_key_t<TypeGroupVertex, Double> type_key =
        t.vertex_property_family_get("score")
            .get(Flags::Double)
            .type_key<Double>();

    auto best_found = new std::map<Id, Score>[max_depth];

    std::vector<Node *> best;
    auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; };
    std::priority_queue<Node *, std::vector<Node *>, decltype(cmp)> queue(cmp);

    auto start_vr = t.vertex_find(sys_id_start);
    assert(start_vr);
    start_vr.get().fill();
    Node *start = new Node(start_vr.take(), 0, type_key);
    queue.push(start);
    int count = 0;
    do
    {
        auto now = queue.top();
        queue.pop();
        // if(!visited.insert(now)){
        //     continue;
        // }

        if (max_depth <= now->depth)
        {
            best.push_back(now);
            count++;
            if (count >= limit)
            {
                return best;
            }
            continue;
        }

        // { // FOUND FILTER
        //     Score &bef = best_found[now->depth][now->vacc.id()];
        //     if (bef.value <= now->cost) {
        //         continue;
        //     }
        //     bef.value = now->cost;
        // }

        iter::for_all(now->vacc.out(), [&](auto edge) {
            if (e_filter[now->depth](t, edge, now))
            {
                VertexAccessor va = edge.to();
                if (v_filter[now->depth](t, va, now))
                {
                    auto cost = calc_heuristic_cost(type_key, edge, va);
                    Node *n = new Node(va, now->cost + cost, now, type_key);
                    queue.push(n);
                }
            }
        });
    } while (!queue.empty());

    // TODO: memory is leaked because the nodes are never deleted

    t.commit();
    return best;
}

int main(int argc, char **argv)
{
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());

    auto para = all_arguments(argc, argv);

    Db db(false);
    auto loaded = import_csv_from_arguments(db, para);
    add_scores(db);

    EdgeFilter e_filters[] = {&edge_filter_dummy, &edge_filter_dummy,
                              &edge_filter_dummy, &edge_filter_dummy};
    VertexFilter f_filters[] = {
        &vertex_filter_contained, &vertex_filter_contained,
        &vertex_filter_contained, &vertex_filter_contained};

    // CONF
    std::srand(time(0));
    auto best_n = 10;
    auto bench_n = 1000;
    auto best_print_n = 10;
    bool pick_best_found =
        strcmp(get_argument(para, "-p", "true").c_str(), "true") == 0;

    double sum = 0;
    std::vector<Node *> best;
    for (int i = 0; i < bench_n; i++)
    {
        auto start_vertex_index = std::rand() % loaded.first;

        auto begin = clock();
        auto found = a_star(db, start_vertex_index, 3, e_filters, f_filters,
                            &calc_heuristic_cost_dummy, best_n);
        clock_t end = clock();

        double elapsed_ms = (double(end - begin) / CLOCKS_PER_SEC) * 1000;
        sum += elapsed_ms;

        if ((best.size() < best_print_n && found.size() > best.size()) ||
            (pick_best_found && found.size() > 0 &&
             found.front()->sum_vertex_score() >
                 best.front()->sum_vertex_score()))
        {
            best = found;
        }

        // Just to be safe
        if (i + 1 == bench_n && best.size() == 0)
        {
            bench_n++;
        }
    }

    std::cout << "\nSearch for best " << best_n
              << " results has runing time of:\n avg: " << sum / bench_n
              << " [ms]\n";
    std::cout << "\nExample of best result:\n";
    for (int i = 0; i < best_print_n && best.size() > 0; i++)
    {
        found_result(best.front());
        best.erase(best.begin());
    }

    return 0;
}

// Adds property score to all vertices.
void add_scores(Db &db)
{
    DbAccessor t(db);

    auto key_score =
        t.vertex_property_family_get("score").get(Flags::Double).family_key();

    int i = 1;
    iter::for_all(t.vertex_access(), [&](auto v) {
        if (v.fill())
        {
            // any random number is OK
            std::srand(i ^ 0x7482616);
            v.set(StoredProperty<TypeGroupVertex>(
                Double((std::rand() % max_score) / (max_score + 0.0)),
                key_score));
            i++;
        }
    });

    t.commit();
}
@@ -1,71 +0,0 @@
#include <chrono>
#include <cstring>
#include <ctime>
#include <fstream>
#include <iostream>
#include <queue>
#include <regex>
#include <sstream>
#include <string>
#include <vector>

#include "communication/bolt/v1/serialization/bolt_serializer.hpp"
#include "data_structures/map/rh_hashmap.hpp"
#include "database/graph_db.hpp"
#include "database/db_accessor.cpp"
#include "database/db_accessor.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "queries/astar.hpp"
#include "storage/edge_x_vertex.hpp"
#include "storage/edges.cpp"
#include "storage/edges.hpp"
#include "storage/indexes/impl/nonunique_unordered_index.cpp"
#include "storage/model/properties/properties.cpp"
#include "storage/record_accessor.cpp"
#include "storage/vertex_accessor.hpp"
#include "storage/vertices.cpp"
#include "storage/vertices.hpp"
#include "utils/command_line/arguments.hpp"

int main(int argc, char **argv)
{
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());
    std::srand(time(0));

    auto para = all_arguments(argc, argv);

    Db db("astar");
    PlanCPU plan;
    int bench_n = 1000;

    do
    {
        double sum = 0;
        for (int i = 0; i < bench_n; i++)
        {
            auto start_vertex_index =
                std::rand() % db.graph.vertices.access().size();

            auto begin = clock();

            plan_args_t args;
            args.push_back(Property(Int64(start_vertex_index), Int64::type));

            plan.run(db, args, std::cout);

            clock_t end = clock();

            double elapsed_ms = (double(end - begin) / CLOCKS_PER_SEC) * 1000;
            sum += elapsed_ms;
        }

        std::cout << "\nSearch for best " << limit
                  << " results has runing time of:\n avg: " << sum / bench_n
                  << " [ms]\n";
    } while (true);

    return 0;
}
@@ -1,25 +0,0 @@
#include "database/graph_db.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "utils/command_line/arguments.hpp"

using namespace std;

// Tool for importing csv to make snapshot of the database after import.
// Accepts flags for csv import.
// -db name # will create database with that name.
int main(int argc, char **argv)
{
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());

    auto para = all_arguments(argc, argv);
    Db db(get_argument(para, "-db", "default"));

    import_csv_from_arguments(db, para);

    db.snap_engine.make_snapshot();

    return 0;
}
poc/example.cpp (new file, 6 lines)
@@ -0,0 +1,6 @@
#include <iostream>

int main() {
  std::cout << "Proof of concept binary example" << std::endl;
  return 0;
}
@@ -1,26 +0,0 @@
// Keeping this as the first include prevents accidentally importing other
// code into the isolated code.
#include "isolation/isolated.hpp"

#include <iostream>

#include "isolation/db.hpp"
#include "isolation/header.hpp"

// using namespace base;

int main()
{
    std::cout << sizeof(Accessor) << " : " << alignof(Accessor) << "\n";
    std::cout << sizeof(sha::Accessor) << " : " << alignof(sha::Accessor)
              << "\n";

    Db db;
    db.data = 207;

    auto ret = sha::do_something(reinterpret_cast<sha::Db &>(db));

    std::cout << ret << std::endl;

    return 0;
}
@@ -1,25 +0,0 @@
#include <string>

class Accessor
{
public:
    char before = ~((char)-1);
    int data = 0;
    size_t after = ~((size_t)-1);
};

class Name
{
public:
    Name(const char *str) : name(std::string(str)) {}

    std::string name;
};

class Db
{
public:
    int accessed = 0;
    int data = 0;
    Name name = {"name"};
};
@@ -1,58 +0,0 @@
#include "isolation/header.hpp"

#include "isolation/db.hpp"

template <class TO, class FROM>
TO &ref_as(FROM &ref)
{
    return (*reinterpret_cast<TO *>(&ref));
}

// template <class TO, class FROM>
// TO value_as(FROM &&ref)
// {
//     return std::move((*reinterpret_cast<TO *>(&ref)));
// }

sha::Accessor::Accessor(const sha::Accessor &other)
    : Sized(sizeof(::Accessor), alignof(::Accessor))
{
    as<::Accessor>() = other.as<::Accessor>();
}

sha::Accessor::Accessor(sha::Accessor &&other)
    : Sized(sizeof(::Accessor), alignof(::Accessor))
{
    as<::Accessor>() = value_as<::Accessor>(other);
}

sha::Accessor::~Accessor() { as<::Accessor>().~Accessor(); }

sha::Accessor &sha::Accessor::operator=(const sha::Accessor &other)
{
    // TODO
    return *this;
}
sha::Accessor &sha::Accessor::operator=(sha::Accessor &&other)
{
    // TODO
    return *this;
}

int sha::Accessor::get_prop(sha::Name &name) { return as<::Accessor>().data; }

sha::Accessor sha::Db::access()
{
    auto &db = as<::Db>();
    db.accessed++;
    ::Accessor acc;
    acc.data = db.data;
    return sha::Accessor(std::move(acc));
}

sha::Name &sha::Db::get_name(const char *str)
{
    auto &db = as<::Db>();
    db.accessed++;
    return ref_as<sha::Name>(db.name);
}
@@ -1,155 +0,0 @@
#pragma once

// All fake types should be defined here as an interface.

#include <cassert>
#include <type_traits>
#include <utility>

namespace sha
{

template <class TO, class FROM>
TO value_as(FROM &&ref)
{
    return std::move((*reinterpret_cast<TO *>(&ref)));
}

// Sized
class Name;
class Db;

// Unsized
class Accessor;

// Marks types which will be passed only by ref/pointer.
class Unsized
{
public:
    // This will assure that this class/derived classes can't be instantiated,
    // copied or moved.
    // This way the other side can't "accidentally" create/copy/move or destroy
    // this type because that would be erroneous.
    Unsized() = delete;
    Unsized(const Unsized &other) = delete;
    Unsized(Unsized &&other) = delete;
    ~Unsized() = delete;
    Unsized &operator=(const Unsized &other) = delete;
    Unsized &operator=(Unsized &&other) = delete;

protected:
    template <class T>
    T &as()
    {
        return (*reinterpret_cast<T *>(this));
    }

    template <class T>
    const T &as() const
    {
        return (*reinterpret_cast<const T *>(this));
    }
};

// Every type which will be passed by value must extend this class.
template <std::size_t size_B, std::size_t alignment_B>
class Sized
{
public:
    // This will assure that this/derived class can't be instantiated.
    // This way the other side can't "accidentally" create this/derived type
    // because that would be erroneous.
    Sized() = delete;

    // This constructor also serves as a check for correctness of size and
    // alignment.
    Sized(std::size_t _size_B, std::size_t _alignment_B)
    {
        assert(size_B == _size_B);
        assert(alignment_B == _alignment_B);
    }

    // This constructor also serves as a check for correctness of size and
    // alignment.
    template <class T>
    Sized(T &&d)
        : data(value_as<
               typename std::aligned_storage<size_B, alignment_B>::type>(
              std::move(d)))
    {

        static_assert(size_B == sizeof(T), "Border class size mismatch");
        static_assert(alignment_B == alignof(T),
                      "Border class aligment mismatch");
    }

protected:
    template <class T>
    T &as()
    {
        return (*reinterpret_cast<T *>(&data));
    }

    template <class T>
    const T &as() const
    {
        return (*reinterpret_cast<const T *>(&data));
    }

private:
    // Here the first template argument is the size of the struct in bytes,
    // while the second one is the alignment of the struct in bytes. Every
    // class which will be passed by value must have this kind of aligned
    // storage with the correct size and alignment for that type. Unit tests to
    // check this must be present. An example would be:
    // std::aligned_storage<sizeof(std::set<int>), alignof(std::set<int>)>
    // while the results of sizeof and alignof would be passed as template
    // arguments.
    // These values would be checked in tests like the following for the
    // example above:
    // assert(sizeof(Accessor)==sizeof(std::set<int>));
    // assert(alignof(Accessor)==alignof(std::set<int>));
    typename std::aligned_storage<size_B, alignment_B>::type data;
};

// Type which will be passed by value so its real size matters.
class Accessor : private Sized<16, 8>
{
    // The only border classes which can create this class.
    friend Db;

private:
    // The only valid constructor for the original class
    template <class T>
    Accessor(T &&d) : Sized(std::move(d))
    {
    }

public:
    // If the underlying type can't be copied or moved these two constructors
    // would be deleted.
    Accessor(const Accessor &other);
    Accessor(Accessor &&other);
    ~Accessor();

    // If the underlying type can't be copied or moved these two operators
    // would be deleted.
    Accessor &operator=(const Accessor &other);
    Accessor &operator=(Accessor &&other);

    int get_prop(Name &name);
};

// Type which will be passed by ref/pointer only so its size doesn't matter.
class Name : private Unsized
{
};

// Type which will be passed by ref/pointer only so its size doesn't matter.
class Db : private Unsized
{
public:
    Accessor access();
    Name &get_name(const char *str);
};
}
@@ -1,17 +0,0 @@
// This is the file of isolated code. It has access only to header.hpp

#include "isolation/header.hpp"

namespace sha
{
int do_something(Db &db)
{
    auto &name = db.get_name("name");

    auto acc = db.access();

    auto ret = acc.get_prop(name);

    return ret;
}
}
@@ -1,90 +0,0 @@
#include <chrono>
#include <ctime>
#include <strings.h>
#include <unistd.h>
#include <unordered_map>

#include "database/graph_db.hpp"
#include "database/db_accessor.hpp"
#include "communication/bolt/v1/serialization/bolt_serializer.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "utils/command_line/arguments.hpp"
#include "profile.hpp"

using namespace std;

// (company, {type_name, score})
using company_profile_type =
    pair<VertexAccessor, unordered_map<string, double>>;

// Accepted flags for CSV import.
// -db name # will create database with that name.
// -s true  # will create snapshot of the database after import.
int main(int argc, char **argv)
{
    // initialize logger
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());

    // read program arguments
    auto para = all_arguments(argc, argv);
    Db db(get_argument(para, "-db", "powerlinks_profile"));

    // import database
    import_csv_from_arguments(db, para);

    {
        DbAccessor t(db);
        vector<company_profile_type> company_profiles;

        // query benchmark
        auto begin = clock();
        int n = for_all_companys(t, company_profiles);
        clock_t end = clock();
        double elapsed_s = (double(end - begin) / CLOCKS_PER_SEC);

        if (n == 0) {
            cout << "No companys" << endl;
            return 0;
        }

        // performance statistics
        cout << endl
             << "Query duration: " << (elapsed_s / n) * 1000 * 1000 << " [us]"
             << endl;
        cout << "Throughput: " << 1 / (elapsed_s / n) << " [query/sec]" << endl;

        // remove ones who don't have profile results
        auto res = company_profiles.back();
        while (res.second.empty()) {
            company_profiles.pop_back();
            res = company_profiles.back();
        }

        // print specific company
        int company_id = std::stoi(get_argument(para, "-company_id", "230216"));
        for (auto &company_profile : company_profiles) {
            auto prop_vertex_id = t.vertex_property_key<Int64>("company_id");
            auto db_company_id =
                company_profile.first.at(prop_vertex_id).get();
            if (db_company_id->value() == company_id) {
                cout << endl << "CompanyID: " << company_id << endl;
                for (auto e : company_profile.second) {
                    cout << e.first << " = " << e.second << endl;
                }
            }
        }

        t.commit();
    }

    if (get_argument(para, "-s", "false") == "true") {
        db.snap_engine.make_snapshot();
    }

    // usleep(1000 * 1000 * 60);

    return 0;
}
poc/profile.hpp (deleted, 339 lines)
@@ -1,339 +0,0 @@
#pragma once

#include <chrono>
#include <ctime>
#include <iostream>
#include <map>
#include <strings.h>
#include <unistd.h>
#include <unordered_map>

#include "storage/edge_x_vertex.hpp"

using namespace std;

// TODO: Turn the next template, expand on it, standardize it, and use it for
// query generation.

template <class C>
void fill_to_fill(EdgeAccessor &e, const EdgeType &type, C &&consumer)
{
    if (e.fill() && e.edge_type() == type) {
        auto to = e.to();
        if (to.fill()) {
            consumer(to);
        }
    }
}

template <class C>
void fill_from_fill(EdgeAccessor &e, const EdgeType &type, C &&consumer)
{
    if (e.fill() && e.edge_type() == type) {
        auto from = e.from();
        if (from.fill()) {
            consumer(from);
        }
    }
}

template <class C>
void fill_to_fill(EdgeAccessor &e, C &&consumer)
{
    if (e.fill()) {
        auto to = e.to();
        if (to.fill()) {
            consumer(to);
        }
    }
}

template <class C>
void to_fill(EdgeAccessor &e, C &&consumer)
{
    auto to = e.to();
    if (to.fill()) {
        consumer(to);
    }
}

template <class C>
void to_fill(EdgeAccessor &e, const Label &label, C &&consumer)
{
    auto to = e.to();
    if (to.fill() && to.has_label(label)) {
        consumer(to);
    }
}

template <class C>
void to_fill(EdgeAccessor &e, const EdgeType &type, const Label &label,
             C &&consumer)
{
    if (e.edge_type() == type) {
        auto to = e.to();
        if (to.fill() && to.has_label(label)) {
            consumer(to);
        }
    }
}

template <class C>
void from_fill(EdgeAccessor &e, const EdgeType &type, C &&consumer)
{
    if (e.edge_type() == type) {
        auto from = e.from();
        if (from.fill()) {
            consumer(from);
        }
    }
}

template <class C>
void fill_from_fill(EdgeAccessor &e, C &&consumer)
{
    if (e.fill()) {
        auto from = e.from();
        if (from.fill()) {
            consumer(from);
        }
    }
}

namespace iter
{
template <class I, class C>
void for_all_fill(I iter, C &&consumer)
{
    auto e = iter.next();
    while (e.is_present()) {
        if (e.get().fill()) consumer(e.take());
        e = iter.next();
    }
}

template <class I, class C>
void find_fill(I iter, C &&consumer)
{
    auto e = iter.next();
    while (e.is_present()) {
        if (e.get().fill()) {
            if (consumer(e.take())) {
                return;
            }
        }
        e = iter.next();
    }
}
}

void fill_with_bt(unordered_map<string, double> &values, VertexAccessor &com,
                  double weight,
                  VertexPropertyType<ArrayString> &prop_vertex_business_types)
{
    auto bus_t = com.at(prop_vertex_business_types);
    if (bus_t.is_present()) {
        for (auto &bt : bus_t.get()->value()) {
            values[bt] += weight;
        }
    }
}

void oportunity_employe_company(
    VertexAccessor &va, unordered_map<string, double> &values, double weight,
    VertexPropertyType<ArrayString> &prop_vertex_business_types,
    const EdgeType &type_created, const EdgeType &type_works_in,
    const Label &label_company)
{
    iter::for_all_fill(va.in(), [&](auto opp_e) {
        // cout << " oec.in()" << endl;
        from_fill(opp_e, type_created, [&](auto creator) {
            // cout << " type_created" << endl;
            iter::for_all_fill(creator.out(), [&](auto creator_e) {
                // cout << " creator.out()" <<
                // endl;
                to_fill(creator_e, type_works_in, label_company,
                        [&](auto end_com) {
                            // cout << " fill_bt"
                            // << endl;
                            fill_with_bt(values, end_com, weight,
                                         prop_vertex_business_types);
                        });
            });

        });
    });
}

auto query(DbAccessor &t, const Id &start_id)
{
    // DbAccessor t(db);
    unordered_map<string, double> values;

    const Label &label_company = t.label_find_or_create("Company");
    const Label &label_opportunuty = t.label_find_or_create("Opportunity");

    const EdgeType &type_works_in = t.type_find_or_create("Works_In");
    const EdgeType &type_reached_to = t.type_find_or_create("Reached_To");
    const EdgeType &type_partnered_with =
        t.type_find_or_create("Partnered_With");
    const EdgeType &type_interested_in = t.type_find_or_create("Interested_In");
    const EdgeType &type_viewed = t.type_find_or_create("Viewed");
    const EdgeType &type_has_match = t.type_find_or_create("Has_Match");
    const EdgeType &type_searched_and_clicked =
        t.type_find_or_create("Searched_And_Clicked");
    const EdgeType &type_is_employee = t.type_find_or_create("Is_Employee");
    const EdgeType &type_created = t.type_find_or_create("Created");

    auto prop_edge_status = t.edge_property_key<String>("status");
    auto prop_edge_count = t.edge_property_key<Int64>("count");
    auto prop_edge_feedback = t.edge_property_key<String>("feedback");

    auto prop_vertex_business_types =
        t.vertex_property_key<ArrayString>("business_types");

    auto osva = t.vertex_find(start_id);
    if (!option_fill(osva)) {
        cout << "Illegal start vertex" << endl;
        return values;
    }
    auto start = osva.take();

    // PARTNERS
    iter::for_all_fill(start.out(), [&](auto e) {
        // cout << "start.out()" << endl;
        to_fill(e, type_partnered_with, label_company, [&](auto end_com) {
            fill_with_bt(values, end_com, 0.9, prop_vertex_business_types);
        });
    });

    // PERSONNEL
    ::iter::for_all(start.in(), [&](auto e) {
        // cout << "start.in()" << endl;
        fill_from_fill(e, type_works_in, [&](auto employ) {
            // cout << " type_works_in" << endl;
            iter::for_all_fill(employ.out(), [&](auto employ_edge) {
                // cout << " employ.out()" << endl;
                auto &ee_type = employ_edge.edge_type();
                // cout << " ee_type: " << ee_type << endl;

                if (ee_type == type_interested_in) {
                    // cout << " type_interested_in" << endl;
                    // INTERESTED IN OPPORTUNITIES
                    to_fill(employ_edge, label_opportunuty, [&](auto opp) {
                        oportunity_employe_company(
                            opp, values, 1, prop_vertex_business_types,
                            type_created, type_works_in, label_company);

                    });

                } else if (ee_type == type_created) {
                    // cout << " type_created" << endl;
                    // CREATED OPPORTUNITIES
                    to_fill(employ_edge, label_opportunuty, [&](auto opp) {
                        iter::for_all_fill(opp.out(), [&](auto edge) {
                            auto feedback = edge.at(prop_edge_feedback);
                            if (!feedback.is_present()) {
                                return;
                            }

                            auto str = feedback.get()->value().c_str();
                            double weight = 0;
                            if (strcasecmp(str, "like") == 0) {
                                weight = 1;
                            } else if (strcasecmp(str, "dislike") == 0) {
                                weight = -1;
                            } else {
                                return;
                            }

                            to_fill(edge, label_company, [&](auto end_com) {
                                fill_with_bt(values, end_com, weight,
                                             prop_vertex_business_types);
                            });
                        });
                    });

                } else {
                    // cout << " company" << endl;
                    // COMPANY
                    double weight = 0;
                    if (ee_type == type_reached_to) {
                        auto os = employ_edge.at(prop_edge_status);
                        if (!os.is_present()) {
                            return;
                        }
                        auto str = os.get()->value().c_str();

                        if (strcasecmp(str, "pending") == 0) {
                            weight = 0.5;
                        } else if (strcasecmp(str, "connected") == 0) {
                            weight = 1;
                        } else if (strcasecmp(str, "unreachable") == 0) {
                            weight = 0.5;
                        } else if (strcasecmp(str, "not_a_match") == 0) {
                            weight = -1;
                        } else {
                            cout << "unknown status: " << str << endl;
                        }
                    } else if (ee_type == type_viewed ||
                               ee_type == type_searched_and_clicked) {
                        auto count = employ_edge.at(prop_edge_count);
                        if (count.is_present()) {
                            weight = 0.01 * (count.get()->value());
                        }
                    }

                    // TARGET COMPANY
                    if (weight != 0) {
                        to_fill(employ_edge, [&](auto t_com) {
                            fill_with_bt(values, t_com, weight,
                                         prop_vertex_business_types);
                        });
                    }
                }
            });
        });
    });

    return values;
}

Option<Id> find_company(DbAccessor &t, int64_t cid)
{
    // DbAccessor t(db);

    Option<Id> found;

    auto prop_vertex_company_id = t.vertex_property_key<Int64>("company_id");
    const Label &label_company = t.label_find_or_create("Company");

    iter::find_fill(label_company.index().for_range(t), [&](auto v) {
        if (v.has_label(label_company)) {
            auto id = v.at(prop_vertex_company_id);
            if (id.is_present()) {
                if ((*id.get()) == cid) {
                    found = Option<Id>(v.id());
                    return true;
                }
            }
        }
        return false;
    });

    return found;
}

size_t for_all_companys(
    DbAccessor &t,
    vector<pair<VertexAccessor, unordered_map<string, double>>> &coll)
{
    int i = 0;
    iter::for_all_fill(
        t.label_find_or_create("Company").index().for_range(t), [&](auto v) {
            coll.push_back(make_pair(v, query(t, v.id())));
            i++;
            return false;
        });
    return i;
}
@@ -1,155 +0,0 @@
#include <iostream>
#include <queue>
#include <string>
#include <vector>

#include "query/i_plan_cpu.hpp"
#include "storage/model/properties/all.hpp"
#include "storage/vertex_accessor.hpp"
#include "storage/edge_x_vertex.hpp"
#include "utils/memory/stack_allocator.hpp"

using std::cout;
using std::endl;

// Dressipi astar query of 4 clicks.

// TODO: push down appropriate
using Stream = std::ostream;

// TODO: figure out from the pattern in a query
constexpr size_t max_depth = 3;

// TODO: from query LIMIT 10
constexpr size_t limit = 10;

class Node
{
public:
    Node *parent = {nullptr};
    VertexPropertyType<Double> tkey;
    double cost;
    int depth = {0};
    VertexAccessor vacc;

    Node(VertexAccessor vacc, double cost,
         VertexPropertyType<Double> const &tkey)
        : cost(cost), vacc(vacc), tkey(tkey)
    {
    }
    Node(VertexAccessor vacc, double cost, Node *parent,
         VertexPropertyType<Double> const &tkey)
        : cost(cost), vacc(vacc), parent(parent), depth(parent->depth + 1),
          tkey(tkey)
    {
    }

    double sum_vertex_score()
    {
        auto now = this;
        double sum = 0;
        do
        {
            sum += (now->vacc.at(tkey).get())->value();
            now = now->parent;
        } while (now != nullptr);
        return sum;
    }
};

bool vertex_filter_contained(DbAccessor &t, VertexAccessor &v, Node *before)
{
    if (v.fill())
    {
        bool found;
        do
        {
            found = false;
            before = before->parent;
            if (before == nullptr)
            {
                return true;
            }
        } while (v.in_contains(before->vacc));
    }
    return false;
}

void astar(DbAccessor &t, plan_args_t &args, Stream &stream)
{
    StackAllocator stack;
    VertexPropertyType<Double> tkey = t.vertex_property_key<Double>("score");

    auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; };
    std::priority_queue<Node *, std::vector<Node *>, decltype(cmp)> queue(cmp);

    // TODO: internal id independent
    auto start_vr = t.vertex_find(Id(args[0].as<Int64>().value()));
    if (!start_vr.is_present())
    {
        // TODO: stream failure

        return;
    }

    start_vr.get().fill();
    Node *start = new (stack.allocate<Node>()) Node(start_vr.take(), 0, tkey);
    queue.push(start);

    int count = 0;
    do
    {
        auto now = queue.top();
        queue.pop();

        if (now->depth >= max_depth)
        {
            // TODO: stream the result

            count++;

            if (count >= limit)
            {
                // the limit was reached -> STOP the execution
                break;
            }

            // if the limit wasn't reached -> POP the next vertex
            continue;
        }

        iter::for_all(now->vacc.out(), [&](auto edge) {
            VertexAccessor va = edge.to();
            if (vertex_filter_contained(t, va, now))
            {
                auto cost = 1 - va.at(tkey).get()->value();
                Node *n = new (stack.allocate<Node>())
                    Node(va, now->cost + cost, now, tkey);
                queue.push(n);
            }
        });
    } while (!queue.empty());

    stack.free();
}

class PlanCPU : public IPlanCPU<Stream>
{
public:
    bool run(Db &db, plan_args_t &args, Stream &stream) override
    {
        DbAccessor t(db);

        // TODO: find node

        astar(t, args, stream);

        return t.commit();
    }

    ~PlanCPU() {}
};

extern "C" IPlanCPU<Stream> *produce() { return new PlanCPU(); }

extern "C" void destruct(IPlanCPU<Stream> *p) { delete p; }
@@ -1,61 +0,0 @@
#include "database/graph_db.hpp"
#include "database/db_accessor.hpp"

#include <chrono>
#include <ctime>
#include <strings.h>
#include <unistd.h>
#include <unordered_map>
#include "database/db_accessor.cpp"
#include "import/csv_import.hpp"
#include "storage/edge_x_vertex.hpp"
#include "storage/indexes/impl/nonunique_unordered_index.cpp"
#include "storage/model/properties/properties.cpp"
#include "utils/command_line/arguments.hpp"

using namespace std;

using vertex_access_iterator =
    decltype(((DbAccessor *)nullptr_t())->vertex_access());

using out_edge_iterator_t =
    decltype(((VertexAccessor *)(std::nullptr_t()))->out());

using in_edge_iterator_t =
    decltype(((::VertexAccessor *)(std::nullptr_t()))->in());

int main()
{
    cout << "std::string: " << sizeof(std::string)
         << " aligment: " << alignof(std::string) << endl;

    cout << "StoredProperty: " << sizeof(StoredProperty<TypeGroupVertex>)
         << " aligment: " << alignof(StoredProperty<TypeGroupVertex>) << endl;

    cout << "DbAccessor.vertex_access(): size: "
         << sizeof(vertex_access_iterator)
         << " aligment: " << alignof(vertex_access_iterator) << endl;

    cout << "DbAccessor: size: " << sizeof(DbAccessor)
         << " aligment: " << alignof(DbAccessor) << endl;

    cout << "VertexAccessor: size: " << sizeof(VertexAccessor)
         << " aligment: " << alignof(VertexAccessor) << endl;

    cout << "std::unique_ptr<IteratorBase<const ::VertexAccessor>>: size: "
         << sizeof(std::unique_ptr<IteratorBase<const ::VertexAccessor>>)
         << " aligment: "
         << alignof(std::unique_ptr<IteratorBase<const ::VertexAccessor>>)
         << endl;

    cout << "VertexAccessor.out(): size: " << sizeof(out_edge_iterator_t)
         << " aligment: " << alignof(out_edge_iterator_t) << endl;

    cout << "VertexAccessor.in(): size: " << sizeof(in_edge_iterator_t)
         << " aligment: " << alignof(in_edge_iterator_t) << endl;

    // cout << ": size: " << sizeof(void) << " aligment: " << alignof(void)
    //      << endl;

    return 0;
}