Merge branch 'demo' into tests
commit 1042a975dc
@@ -20,8 +20,9 @@ class Db
 public:
     using sptr = std::shared_ptr<Db>;

-    Db();
-    Db(const std::string &name);
+    explicit Db(bool import_snapshot = true);
+    Db(const char *name, bool import_snapshot = true);
+    Db(const std::string &name, bool import_snapshot = true);
     Db(const Db &db) = delete;

 private:
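A hedged usage sketch (not part of the diff; the variable names are illustrative) of how callers pick between the new overloads, where the default still imports an existing snapshot:

    Db defaults;              // "default" database, snapshot imported
    Db scratch(false);        // "default" database, snapshot import skipped
    Db named("demo");         // named database, snapshot imported
    Db bench("bench", false); // named database, snapshot import skipped

poc/astar.cpp below switches to the Db(false) form so its data comes from the CSV arguments rather than from a snapshot.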
@@ -63,6 +63,15 @@ public:
         handler.finish();
     }

+    template <class Handler>
+    void handle(Handler &handler) const
+    {
+        for (auto &kv : props)
+            handler.handle(kv);
+
+        handler.finish();
+    }
+
     template <class Handler>
     void for_all(Handler handler) const
     {
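The new handle() template forwards every stored property to handler.handle(kv) and then calls handler.finish() once. A minimal sketch of a compatible handler (not project code; these are only the members handle() itself requires):

    struct CountingHandler
    {
        int count = 0;

        // called once per stored property
        template <class Property>
        void handle(const Property &) { ++count; }

        // called once after the last property
        void finish() {}
    };

With this, calling handle(counter) on a Properties instance would leave the number of stored properties in counter.count.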
@@ -27,7 +27,7 @@ public:

         if (first) first = false;

-        buffer << '"' << prop.get_property_key().family_name() << "\":";
+        buffer << '"' << prop.key.family_name() << "\":";

         prop.accept(*this);
     }
@@ -121,6 +121,12 @@ public:
         properties().template accept<Handler>(handler);
     }

+    template <class Handler>
+    void handle(Handler &handler) const
+    {
+        properties().template handle<Handler>(handler);
+    }
+
     Properties<TG> &properties() const { return record->data.props; }

     explicit operator bool() const { return record != nullptr; }
@@ -28,7 +28,7 @@ inline std::ostream &operator<<(std::ostream &stream, const Vertex &record)
     JsonWriter<StringBuffer> writer(buffer);

     // dump properties in this buffer
-    record.data.props.accept(writer);
+    record.data.props.handle(writer);
     writer.finish();

     return stream << "Vertex"
@@ -4,6 +4,7 @@ project(memgraph_poc)


 include_directories(${CMAKE_SOURCE_DIR}/poc)
+include_directories(${CMAKE_SOURCE_DIR}/queries)


 add_executable(poc_astar astar.cpp)
@@ -18,6 +19,24 @@ target_link_libraries(profile Threads::Threads)
 target_link_libraries(profile ${fmt_static_lib})
 target_link_libraries(profile ${yaml_static_lib})

+add_executable(csv_import csv_import.cpp)
+target_link_libraries(csv_import memgraph)
+target_link_libraries(csv_import Threads::Threads)
+target_link_libraries(csv_import ${fmt_static_lib})
+target_link_libraries(csv_import ${yaml_static_lib})
+
+add_executable(add_double add_double.cpp)
+target_link_libraries(add_double memgraph)
+target_link_libraries(add_double Threads::Threads)
+target_link_libraries(add_double ${fmt_static_lib})
+target_link_libraries(add_double ${yaml_static_lib})
+
+add_executable(astar_query astar_query.cpp)
+target_link_libraries(astar_query memgraph)
+target_link_libraries(astar_query Threads::Threads)
+target_link_libraries(astar_query ${fmt_static_lib})
+target_link_libraries(astar_query ${yaml_static_lib})
+
 add_executable(size_aligment size_aligment.cpp)
 target_link_libraries(size_aligment memgraph)
 target_link_libraries(size_aligment Threads::Threads)
poc/add_double.cpp (new file, 47 lines)
@@ -0,0 +1,47 @@
#include <cstring>
#include <ctime>

#include "database/db.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "utils/command_line/arguments.hpp"

using namespace std;

// Adds a double property with a random value in [0, max_value] to all vertices.
void add_scores(Db &db, double max_value, std::string const &property_name)
{
    DbAccessor t(db);

    auto key_score = t.vertex_property_family_get(property_name)
                         .get(Flags::Double)
                         .family_key();

    std::srand(time(0));
    t.vertex_access().fill().for_all([&](auto v) {
        double value = ((std::rand() + 0.0) / RAND_MAX) * max_value;
        v.set(StoredProperty<TypeGroupVertex>(Double(value), key_score));
    });

    t.commit();
}

int main(int argc, char **argv)
{
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());

    auto para = all_arguments(argc, argv);

    std::string property_name = get_argument(para, "-pn", "score");
    double max_value = std::stod(get_argument(para, "-max", "1"));

    Db db(get_argument(para, "-db", "default"));

    add_scores(db, max_value, property_name);

    db.snap_engine.make_snapshot();

    return 0;
}
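A hedged companion sketch (not part of the diff): reading the generated scores back with the same accessor calls that add_double.cpp and poc/queries/astar.cpp use. It assumes the same includes as above plus <iostream>, and that the property was written as a Double named "score":

    void print_scores(Db &db)
    {
        DbAccessor t(db);
        auto tkey = t.vertex_property_key<Double>("score");

        t.vertex_access().fill().for_all([&](auto v) {
            std::cout << v.at(tkey).get()->value() << "\n";
        });

        t.commit();
    }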
@@ -13,7 +13,10 @@
 #include "database/db.hpp"
 #include "database/db_accessor.cpp"
 #include "database/db_accessor.hpp"

+#include "import/csv_import.hpp"
+#include "logging/default.hpp"
+#include "logging/streams/stdout.hpp"
 #include "storage/edge_x_vertex.hpp"
 #include "storage/edges.cpp"
 #include "storage/edges.hpp"
@@ -218,9 +221,12 @@ auto a_star(

 int main(int argc, char **argv)
 {
+    logging::init_async();
+    logging::log->pipe(std::make_unique<Stdout>());

     auto para = all_arguments(argc, argv);

-    Db db;
+    Db db(false);
+    auto loaded = import_csv_from_arguments(db, para);
     add_scores(db);

poc/astar_query.cpp (new file, 69 lines)
@@ -0,0 +1,69 @@
#include "queries/astar.cpp"

#include "barrier/barrier.cpp"

#include <chrono>
#include <cstring>
#include <ctime>
#include <fstream>
#include <iostream>
#include <queue>
#include <regex>
#include <sstream>
#include <string>
#include <vector>

#include "communication/bolt/v1/serialization/bolt_serializer.hpp"
#include "data_structures/map/rh_hashmap.hpp"
#include "database/db.hpp"
#include "database/db_accessor.cpp"
#include "database/db_accessor.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "storage/edge_x_vertex.hpp"
#include "storage/edges.cpp"
#include "storage/edges.hpp"
#include "storage/indexes/impl/nonunique_unordered_index.cpp"
#include "storage/model/properties/properties.cpp"
#include "storage/record_accessor.cpp"
#include "storage/vertex_accessor.hpp"
#include "storage/vertices.cpp"
#include "storage/vertices.hpp"
#include "utils/command_line/arguments.hpp"

int main(int argc, char **argv)
{
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());
    std::srand(time(0));

    auto para = all_arguments(argc, argv);

    Db db("astar");
    barrier::CodeCPU cp;
    int bench_n = 1000;
    double sum = 0;
    for (int i = 0; i < bench_n; i++) {
        auto start_vertex_index =
            std::rand() % db.graph.vertices.access().size();

        auto begin = clock();

        code_args_t args;
        args.push_back(Property(Int64(start_vertex_index), Int64::type));

        cp.run(barrier::trans(db), args, std::cout);

        clock_t end = clock();

        double elapsed_ms = (double(end - begin) / CLOCKS_PER_SEC) * 1000;
        sum += elapsed_ms;
    }

    std::cout << "\nSearch for best " << barrier::limit
              << " results has running time of:\n avg: " << sum / bench_n
              << " [ms]\n";

    return 0;
}
poc/csv_import.cpp (new file, 25 lines)
@@ -0,0 +1,25 @@
#include "database/db.hpp"
#include "import/csv_import.hpp"
#include "logging/default.hpp"
#include "logging/streams/stdout.hpp"
#include "utils/command_line/arguments.hpp"

using namespace std;

// Tool for importing CSV data and making a snapshot of the database afterwards.
// Accepts flags for csv import.
// -db name # will create database with that name.
int main(int argc, char **argv)
{
    logging::init_async();
    logging::log->pipe(std::make_unique<Stdout>());

    auto para = all_arguments(argc, argv);
    Db db(get_argument(para, "-db", "default"));

    import_csv_from_arguments(db, para);

    db.snap_engine.make_snapshot();

    return 0;
}
poc/queries/astar.cpp (new file, 143 lines)
@@ -0,0 +1,143 @@
#include <iostream>
#include <queue>
#include <string>
#include <vector>

#include "query_engine/i_code_cpu.hpp"
#include "storage/model/properties/all.hpp"

using std::cout;
using std::endl;

// Dressipi astar query of 4 clicks.

// BARRIER!
namespace barrier
{

using STREAM = std::ostream; // RecordStream<::io::Socket>;

constexpr size_t max_depth = 3;
constexpr size_t limit = 10;

class Node
{
public:
    Node *parent = {nullptr};
    VertexPropertyType<Double> tkey;
    double cost;
    int depth = {0};
    VertexAccessor vacc;

    Node(VertexAccessor vacc, double cost,
         VertexPropertyType<Double> const &tkey)
        : cost(cost), vacc(vacc), tkey(tkey)
    {
    }
    Node(VertexAccessor vacc, double cost, Node *parent,
         VertexPropertyType<Double> const &tkey)
        : cost(cost), vacc(vacc), parent(parent), depth(parent->depth + 1),
          tkey(tkey)
    {
    }

    double sum_vertex_score()
    {
        auto now = this;
        double sum = 0;
        do {
            sum += (now->vacc.at(tkey).get())->value();
            now = now->parent;
        } while (now != nullptr);
        return sum;
    }
};

bool vertex_filter_contained(DbAccessor &t, VertexAccessor &v, Node *before)
{
    if (v.fill()) {
        bool found;
        do {
            found = false;
            before = before->parent;
            if (before == nullptr) {
                return true;
            }
        } while (v.in_contains(before->vacc));
    }
    return false;
}

void astar(DbAccessor &t, code_args_t &args, STREAM &stream)
{
    VertexPropertyType<Double> tkey = t.vertex_property_key<Double>("score");

    auto cmp = [](Node *left, Node *right) { return left->cost > right->cost; };
    std::priority_queue<Node *, std::vector<Node *>, decltype(cmp)> queue(cmp);
    std::vector<Node *> all_nodes;

    auto start_vr = t.vertex_find(Id(args[0].as<Int64>().value()));
    if (!start_vr.is_present()) {
        // stream.write_failure({{}});
        return;
    }

    start_vr.get().fill();
    Node *start = new Node(start_vr.take(), 0, tkey);
    queue.push(start);
    all_nodes.push_back(start);

    int count = 0;
    do {
        auto now = queue.top();
        queue.pop();

        if (max_depth <= now->depth) {
            // stream.write_success_empty();
            // best.push_back(now);
            count++;
            if (count >= limit) {
                break;
            }
            continue;
        }

        iter::for_all(now->vacc.out(), [&](auto edge) {
            VertexAccessor va = edge.to();
            if (vertex_filter_contained(t, va, now)) {
                auto cost = 1 - va.at(tkey).get()->value();
                Node *n = new Node(va, now->cost + cost, now, tkey);
                queue.push(n);
                all_nodes.push_back(n);
            }
        });
    } while (!queue.empty());

    for (auto n : all_nodes) {
        delete n;
    }
}

class CodeCPU : public ICodeCPU<STREAM>
{
public:
    bool run(Db &db, code_args_t &args, STREAM &stream) override
    {
        DbAccessor t(db);

        astar(t, args, stream);

        return t.commit();
    }

    ~CodeCPU() {}
};
}

extern "C" ICodeCPU<barrier::STREAM> *produce()
{
    // BARRIER!
    return new barrier::CodeCPU();
}

extern "C" void destruct(ICodeCPU<barrier::STREAM> *p) { delete p; }
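How the search above ranks paths: each expansion adds 1 - score of the reached vertex to the parent's cost, so the lowest-cost entry on the priority queue is always the path with the highest accumulated scores; paths are counted as results once they reach max_depth (3), and the search stops after limit (10) of them (the stream writes are still commented out). A standalone illustration of that ranking rule (not project code):

    #include <queue>
    #include <vector>

    struct PathNode { double cost; };

    int main()
    {
        // Lower accumulated (1 - score) means a better path, so order by cost ascending.
        auto cmp = [](const PathNode &l, const PathNode &r) { return l.cost > r.cost; };
        std::priority_queue<PathNode, std::vector<PathNode>, decltype(cmp)> best_first(cmp);

        best_first.push({1 - 0.9}); // vertex with score 0.9
        best_first.push({1 - 0.2}); // vertex with score 0.2

        // top() is the 0.9-score path, because its cost 0.1 is the smallest.
        return best_first.top().cost < 0.5 ? 0 : 1;
    }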
@@ -482,6 +482,10 @@ DESTRUCTOR(VertexPropertyKey, PropertyFamilyKey);
 // ************************* EdgePropertyKey
 DESTRUCTOR(EdgePropertyKey, PropertyFamilyKey);

+// ************************* VertexPropertyType
+#define VERTEX_PROPERTY_TYPE(x) template class VertexPropertyType<x>;
+INSTANTIATE_FOR_PROPERTY(VERTEX_PROPERTY_TYPE)
+
 // ************************* VertexPropertyFamily
 OptionPtr<VertexIndex<std::nullptr_t>> VertexPropertyFamily::index()
 {
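The added block explicitly instantiates VertexPropertyType for each property type; judging by the parallel blocks in this file, INSTANTIATE_FOR_PROPERTY presumably expands its macro argument once per supported property type (an assumption, since the macro's definition is not part of this diff). A generic illustration of the explicit-instantiation technique itself (not project code):

    // An explicit instantiation definition compiles the template for the given
    // argument in this translation unit, so other files can link against it
    // without seeing the member definitions.
    template <class T>
    struct Wrapper
    {
        T value;
        T get() const { return value; }
    };

    template struct Wrapper<double>; // explicit instantiation definition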
@@ -4,9 +4,17 @@
 #include "storage/indexes/indexes.hpp"
 #include "storage/model/properties/property_family.hpp"

-Db::Db() : Db("default") {}
+Db::Db(bool import_snapshot) : Db("default", import_snapshot) {}

-Db::Db(const std::string &name) : name_(name) { snap_engine.import(); }
+Db::Db(const std::string &name, bool import_snapshot)
+    : Db(name.c_str(), import_snapshot)
+{
+}
+
+Db::Db(const char *name, bool import_snapshot) : name_(name)
+{
+    if (import_snapshot) snap_engine.import();
+}

 Indexes Db::indexes() { return Indexes(*this); }
