Merge branch 'master' into E129-MG-label-based-authorization
commit 35f8978560
@ -1380,7 +1380,7 @@ cpp<#
|
||||
:documentation "Variable where the total weight for weighted shortest path will be stored."))
|
||||
(:public
|
||||
(lcp:define-enum type
|
||||
(single depth-first breadth-first weighted-shortest-path)
|
||||
(single depth-first breadth-first weighted-shortest-path all-shortest-paths)
|
||||
(:serialize))
|
||||
(lcp:define-enum direction
|
||||
(in out both)
|
||||
@ -1432,6 +1432,7 @@ cpp<#
|
||||
case Type::DEPTH_FIRST:
|
||||
case Type::BREADTH_FIRST:
|
||||
case Type::WEIGHTED_SHORTEST_PATH:
|
||||
case Type::ALL_SHORTEST_PATHS:
|
||||
return true;
|
||||
case Type::SINGLE:
|
||||
return false;
|
||||
|
@ -1733,9 +1733,10 @@ antlrcpp::Any CypherMainVisitor::visitRelationshipPattern(MemgraphCypher::Relati
|
||||
|
||||
auto relationshipLambdas = relationshipDetail->relationshipLambda();
|
||||
if (variableExpansion) {
|
||||
if (relationshipDetail->total_weight && edge->type_ != EdgeAtom::Type::WEIGHTED_SHORTEST_PATH)
|
||||
if (relationshipDetail->total_weight && edge->type_ != EdgeAtom::Type::WEIGHTED_SHORTEST_PATH &&
|
||||
edge->type_ != EdgeAtom::Type::ALL_SHORTEST_PATHS)
|
||||
throw SemanticException(
|
||||
"Variable for total weight is allowed only with weighted shortest "
|
||||
"Variable for total weight is allowed only with weighted and all shortest "
|
||||
"path expansion.");
|
||||
auto visit_lambda = [this](auto *lambda) {
|
||||
EdgeAtom::Lambda edge_lambda;
|
||||
@ -1760,14 +1761,19 @@ antlrcpp::Any CypherMainVisitor::visitRelationshipPattern(MemgraphCypher::Relati
|
||||
throw SemanticException(
|
||||
"Lambda for calculating weights is mandatory with weighted "
|
||||
"shortest path expansion.");
|
||||
else if (edge->type_ == EdgeAtom::Type::ALL_SHORTEST_PATHS)
|
||||
throw SemanticException(
|
||||
"Lambda for calculating weights is mandatory with all "
|
||||
"shortest paths expansion.");
|
||||
// In variable expansion inner variables are mandatory.
|
||||
anonymous_identifiers.push_back(&edge->filter_lambda_.inner_edge);
|
||||
anonymous_identifiers.push_back(&edge->filter_lambda_.inner_node);
|
||||
break;
|
||||
case 1:
|
||||
if (edge->type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH) {
|
||||
// For wShortest, the first (and required) lambda is used for weight
|
||||
// calculation.
|
||||
if (edge->type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH ||
|
||||
edge->type_ == EdgeAtom::Type::ALL_SHORTEST_PATHS) {
|
||||
// For wShortest and allShortest, the first (and required) lambda is
|
||||
// used for weight calculation.
|
||||
edge->weight_lambda_ = visit_lambda(relationshipLambdas[0]);
|
||||
visit_total_weight();
|
||||
// Add mandatory inner variables for filter lambda.
|
||||
@ -1779,7 +1785,7 @@ antlrcpp::Any CypherMainVisitor::visitRelationshipPattern(MemgraphCypher::Relati
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
if (edge->type_ != EdgeAtom::Type::WEIGHTED_SHORTEST_PATH)
|
||||
if (edge->type_ != EdgeAtom::Type::WEIGHTED_SHORTEST_PATH && edge->type_ != EdgeAtom::Type::ALL_SHORTEST_PATHS)
|
||||
throw SemanticException("Only one filter lambda can be supplied.");
|
||||
edge->weight_lambda_ = visit_lambda(relationshipLambdas[0]);
|
||||
visit_total_weight();
|
||||
@ -1838,6 +1844,8 @@ antlrcpp::Any CypherMainVisitor::visitVariableExpansion(MemgraphCypher::Variable
|
||||
edge_type = EdgeAtom::Type::BREADTH_FIRST;
|
||||
else if (!ctx->getTokens(MemgraphCypher::WSHORTEST).empty())
|
||||
edge_type = EdgeAtom::Type::WEIGHTED_SHORTEST_PATH;
|
||||
else if (!ctx->getTokens(MemgraphCypher::ALLSHORTEST).empty())
|
||||
edge_type = EdgeAtom::Type::ALL_SHORTEST_PATHS;
|
||||
Expression *lower = nullptr;
|
||||
Expression *upper = nullptr;
|
||||
|
||||
@ -1848,7 +1856,8 @@ antlrcpp::Any CypherMainVisitor::visitVariableExpansion(MemgraphCypher::Variable
|
||||
auto *bound = std::any_cast<Expression *>(ctx->expression()[0]->accept(this));
|
||||
if (!dots_tokens.size()) {
|
||||
// Case -[*bound]-
|
||||
if (edge_type != EdgeAtom::Type::WEIGHTED_SHORTEST_PATH) lower = bound;
|
||||
if (edge_type != EdgeAtom::Type::WEIGHTED_SHORTEST_PATH && edge_type != EdgeAtom::Type::ALL_SHORTEST_PATHS)
|
||||
lower = bound;
|
||||
upper = bound;
|
||||
} else if (dots_tokens[0]->getSourceInterval().startsAfter(ctx->expression()[0]->getSourceInterval())) {
|
||||
// Case -[*bound..]-
|
||||
@ -1862,8 +1871,8 @@ antlrcpp::Any CypherMainVisitor::visitVariableExpansion(MemgraphCypher::Variable
|
||||
lower = std::any_cast<Expression *>(ctx->expression()[0]->accept(this));
|
||||
upper = std::any_cast<Expression *>(ctx->expression()[1]->accept(this));
|
||||
}
|
||||
if (lower && edge_type == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH)
|
||||
throw SemanticException("Lower bound is not allowed in weighted shortest path expansion.");
|
||||
if (lower && (edge_type == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH || edge_type == EdgeAtom::Type::ALL_SHORTEST_PATHS))
|
||||
throw SemanticException("Lower bound is not allowed in weighted or all shortest path expansion.");
|
||||
|
||||
return std::make_tuple(edge_type, lower, upper);
|
||||
}
|
||||
|
@ -172,7 +172,7 @@ relationshipDetail : '[' ( name=variable )? ( relationshipTypes )? ( variableExp
|
||||
|
||||
relationshipLambda: '(' traversed_edge=variable ',' traversed_node=variable '|' expression ')';
|
||||
|
||||
variableExpansion : '*' (BFS | WSHORTEST)? ( expression )? ( '..' ( expression )? )? ;
|
||||
variableExpansion : '*' (BFS | WSHORTEST | ALLSHORTEST)? ( expression )? ( '..' ( expression )? )? ;
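(For reference: with ALLSHORTEST added to the alternatives, the variable expansion accepts queries of the following shape. This is an illustrative sketch mirroring the Gherkin scenarios added later in this change; the (e, n | e.w) lambda computes the per-edge weight and the identifier after it receives the accumulated total weight.)

MATCH (n {a:'0'})-[le *allShortest 10 (e, n | e.w) total_weight]->(m)
RETURN m.a, size(le) AS s, total_weight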
|
||||
|
||||
properties : mapLiteral
|
||||
| parameter
|
||||
@ -381,6 +381,7 @@ cypherKeyword : ALL
|
||||
| WHERE
|
||||
| WITH
|
||||
| WSHORTEST
|
||||
| ALLSHORTEST
|
||||
| XOR
|
||||
| YIELD
|
||||
;
|
||||
|
@ -139,6 +139,7 @@ WHEN : W H E N ;
|
||||
WHERE : W H E R E ;
|
||||
WITH : W I T H ;
|
||||
WSHORTEST : W S H O R T E S T ;
|
||||
ALLSHORTEST : A L L S H O R T E S T ;
|
||||
XOR : X O R ;
|
||||
YIELD : Y I E L D ;
|
||||
|
||||
|
@ -49,6 +49,7 @@
|
||||
#include "utils/likely.hpp"
|
||||
#include "utils/logging.hpp"
|
||||
#include "utils/memory.hpp"
|
||||
#include "utils/pmr/list.hpp"
|
||||
#include "utils/pmr/unordered_map.hpp"
|
||||
#include "utils/pmr/unordered_set.hpp"
|
||||
#include "utils/pmr/vector.hpp"
|
||||
@ -839,9 +840,9 @@ ExpandVariable::ExpandVariable(const std::shared_ptr<LogicalOperator> &input, Sy
|
||||
weight_lambda_(weight_lambda),
|
||||
total_weight_(total_weight) {
|
||||
DMG_ASSERT(type_ == EdgeAtom::Type::DEPTH_FIRST || type_ == EdgeAtom::Type::BREADTH_FIRST ||
|
||||
type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH,
|
||||
"ExpandVariable can only be used with breadth first, depth first or "
|
||||
"weighted shortest path type");
|
||||
type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH || type_ == EdgeAtom::Type::ALL_SHORTEST_PATHS,
|
||||
"ExpandVariable can only be used with breadth first, depth first, "
|
||||
"weighted shortest path or all shortest paths type");
|
||||
DMG_ASSERT(!(type_ == EdgeAtom::Type::BREADTH_FIRST && is_reverse), "Breadth first expansion can't be reversed");
|
||||
}
|
||||
|
||||
@ -1527,6 +1528,28 @@ class SingleSourceShortestPathCursor : public query::plan::Cursor {
|
||||
utils::pmr::vector<std::pair<EdgeAccessor, VertexAccessor>> to_visit_next_;
|
||||
};
|
||||
|
||||
namespace {
|
||||
|
||||
void CheckWeightType(TypedValue current_weight, utils::MemoryResource *memory) {
|
||||
if (!current_weight.IsNumeric() && !current_weight.IsDuration()) {
|
||||
throw QueryRuntimeException("Calculated weight must be numeric or a Duration, got {}.", current_weight.type());
|
||||
}
|
||||
|
||||
const auto is_valid_numeric = [&] {
|
||||
return current_weight.IsNumeric() && (current_weight >= TypedValue(0, memory)).ValueBool();
|
||||
};
|
||||
|
||||
const auto is_valid_duration = [&] {
|
||||
return current_weight.IsDuration() && (current_weight >= TypedValue(utils::Duration(0), memory)).ValueBool();
|
||||
};
|
||||
|
||||
if (!is_valid_numeric() && !is_valid_duration()) {
|
||||
throw QueryRuntimeException("Calculated weight must be non-negative!");
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
class ExpandWeightedShortestPathCursor : public query::plan::Cursor {
|
||||
public:
|
||||
ExpandWeightedShortestPathCursor(const ExpandVariable &self, utils::MemoryResource *mem)
|
||||
@ -1573,21 +1596,7 @@ class ExpandWeightedShortestPathCursor : public query::plan::Cursor {
|
||||
|
||||
TypedValue current_weight = self_.weight_lambda_->expression->Accept(evaluator);
|
||||
|
||||
if (!current_weight.IsNumeric() && !current_weight.IsDuration()) {
|
||||
throw QueryRuntimeException("Calculated weight must be numeric or a Duration, got {}.", current_weight.type());
|
||||
}
|
||||
|
||||
const auto is_valid_numeric = [&] {
|
||||
return current_weight.IsNumeric() && (current_weight >= TypedValue(0, memory)).ValueBool();
|
||||
};
|
||||
|
||||
const auto is_valid_duration = [&] {
|
||||
return current_weight.IsDuration() && (current_weight >= TypedValue(utils::Duration(0), memory)).ValueBool();
|
||||
};
|
||||
|
||||
if (!is_valid_numeric() && !is_valid_duration()) {
|
||||
throw QueryRuntimeException("Calculated weight must be non-negative!");
|
||||
}
|
||||
CheckWeightType(current_weight, memory);
|
||||
|
||||
auto next_state = create_state(vertex, depth);
|
||||
|
||||
@ -1797,6 +1806,302 @@ class ExpandWeightedShortestPathCursor : public query::plan::Cursor {
|
||||
}
|
||||
};
|
||||
|
||||
class ExpandAllShortestPathsCursor : public query::plan::Cursor {
|
||||
public:
|
||||
ExpandAllShortestPathsCursor(const ExpandVariable &self, utils::MemoryResource *mem)
|
||||
: self_(self),
|
||||
input_cursor_(self_.input_->MakeCursor(mem)),
|
||||
visited_cost_(mem),
|
||||
expanded_(mem),
|
||||
next_edges_(mem),
|
||||
traversal_stack_(mem),
|
||||
pq_(mem) {}
|
||||
|
||||
bool Pull(Frame &frame, ExecutionContext &context) override {
|
||||
SCOPED_PROFILE_OP("ExpandAllShortestPathsCursor");
|
||||
|
||||
ExpressionEvaluator evaluator(&frame, context.symbol_table, context.evaluation_context, context.db_accessor,
|
||||
storage::View::OLD);
|
||||
|
||||
// For the given (edge, direction, weight, depth) tuple, checks whether it
|
||||
// satisfies the "where" condition. If so, places it in the priority
|
||||
// queue.
|
||||
auto expand_vertex = [this, &evaluator, &frame](const EdgeAccessor &edge, const EdgeAtom::Direction direction,
|
||||
const TypedValue &total_weight, int64_t depth) {
|
||||
auto *memory = evaluator.GetMemoryResource();
|
||||
|
||||
auto const &next_vertex = direction == EdgeAtom::Direction::IN ? edge.From() : edge.To();
|
||||
|
||||
// If filter expression exists, evaluate filter
|
||||
if (self_.filter_lambda_.expression) {
|
||||
frame[self_.filter_lambda_.inner_edge_symbol] = edge;
|
||||
frame[self_.filter_lambda_.inner_node_symbol] = next_vertex;
|
||||
|
||||
if (!EvaluateFilter(evaluator, self_.filter_lambda_.expression)) return;
|
||||
}
|
||||
|
||||
// Evaluate current weight
|
||||
frame[self_.weight_lambda_->inner_edge_symbol] = edge;
|
||||
frame[self_.weight_lambda_->inner_node_symbol] = next_vertex;
|
||||
|
||||
TypedValue current_weight = self_.weight_lambda_->expression->Accept(evaluator);
|
||||
|
||||
CheckWeightType(current_weight, memory);
|
||||
|
||||
TypedValue next_weight = std::invoke([&] {
|
||||
if (total_weight.IsNull()) {
|
||||
return current_weight;
|
||||
}
|
||||
|
||||
ValidateWeightTypes(current_weight, total_weight);
|
||||
|
||||
return TypedValue(current_weight, memory) + total_weight;
|
||||
});
|
||||
|
||||
auto found_it = visited_cost_.find(next_vertex);
|
||||
// Check if the vertex has already been processed.
|
||||
if (found_it != visited_cost_.end()) {
|
||||
auto weight = found_it->second;
|
||||
|
||||
if (weight.IsNull() || (next_weight <= weight).ValueBool()) {
|
||||
// Already visited, but a path of equal or lower weight has now been found
|
||||
visited_cost_[next_vertex] = next_weight;
|
||||
} else {
|
||||
// Continue and do not expand if current weight is larger
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
visited_cost_[next_vertex] = next_weight;
|
||||
}
|
||||
|
||||
DirectedEdge directed_edge = {edge, direction, next_weight};
|
||||
pq_.push({next_weight, depth + 1, next_vertex, directed_edge});
|
||||
};
|
||||
|
||||
// Populates the priority queue structure with expansions
|
||||
// from the given vertex. Skips expansions that don't satisfy
|
||||
// the "where" condition.
|
||||
auto expand_from_vertex = [this, &expand_vertex](const VertexAccessor &vertex, const TypedValue &weight,
|
||||
int64_t depth) {
|
||||
if (self_.common_.direction != EdgeAtom::Direction::IN) {
|
||||
auto out_edges = UnwrapEdgesResult(vertex.OutEdges(storage::View::OLD, self_.common_.edge_types));
|
||||
for (const auto &edge : out_edges) {
|
||||
expand_vertex(edge, EdgeAtom::Direction::OUT, weight, depth);
|
||||
}
|
||||
}
|
||||
if (self_.common_.direction != EdgeAtom::Direction::OUT) {
|
||||
auto in_edges = UnwrapEdgesResult(vertex.InEdges(storage::View::OLD, self_.common_.edge_types));
|
||||
for (const auto &edge : in_edges) {
|
||||
expand_vertex(edge, EdgeAtom::Direction::IN, weight, depth);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Check if upper bound exists
|
||||
upper_bound_ = self_.upper_bound_
|
||||
? EvaluateInt(&evaluator, self_.upper_bound_, "Max depth in all shortest paths expansion")
|
||||
: std::numeric_limits<int64_t>::max();
|
||||
|
||||
// Check if upper bound is valid
|
||||
if (upper_bound_ < 1) {
|
||||
throw QueryRuntimeException("Maximum depth in all shortest paths expansion must be at least 1.");
|
||||
}
|
||||
|
||||
std::optional<VertexAccessor> start_vertex;
|
||||
auto *memory = context.evaluation_context.memory;
|
||||
|
||||
while (true) {
|
||||
// Check if there is an external error.
|
||||
if (MustAbort(context)) throw HintedAbortError();
|
||||
|
||||
// If the traversal stack is filled, the DFS traversal tree is created. Traverse the tree iteratively by preserving
|
||||
// the traversal state on stack.
|
||||
while (!traversal_stack_.empty()) {
|
||||
auto &current_level = traversal_stack_.back();
|
||||
auto &edges_on_frame = frame[self_.common_.edge_symbol].ValueList();
|
||||
|
||||
// Clean out the current stack
|
||||
if (current_level.empty()) {
|
||||
if (!edges_on_frame.empty()) {
|
||||
if (!self_.is_reverse_)
|
||||
edges_on_frame.erase(edges_on_frame.end() - 1);
|
||||
else
|
||||
edges_on_frame.erase(edges_on_frame.begin());
|
||||
}
|
||||
traversal_stack_.pop_back();
|
||||
continue;
|
||||
}
|
||||
|
||||
auto [current_edge, current_edge_direction, current_weight] = current_level.back();
|
||||
current_level.pop_back();
|
||||
|
||||
// The order of edges on the frame depends on the direction of expansion
|
||||
if (!self_.is_reverse_)
|
||||
edges_on_frame.emplace_back(current_edge);
|
||||
else
|
||||
edges_on_frame.emplace(edges_on_frame.begin(), current_edge);
|
||||
|
||||
auto next_vertex = current_edge_direction == EdgeAtom::Direction::IN ? current_edge.From() : current_edge.To();
|
||||
frame[self_.common_.node_symbol] = next_vertex;
|
||||
frame[self_.total_weight_.value()] = current_weight;
|
||||
|
||||
if (next_edges_.find({next_vertex, traversal_stack_.size()}) != next_edges_.end()) {
|
||||
auto next_vertex_edges = next_edges_[{next_vertex, traversal_stack_.size()}];
|
||||
traversal_stack_.emplace_back(std::move(next_vertex_edges));
|
||||
} else {
|
||||
// Signal the end of iteration
|
||||
utils::pmr::list<DirectedEdge> empty(memory);
|
||||
traversal_stack_.emplace_back(std::move(empty));
|
||||
}
|
||||
|
||||
if ((current_weight > visited_cost_.at(next_vertex)).ValueBool()) continue;
|
||||
return true;
|
||||
}
|
||||
|
||||
// If the priority queue is empty, start a new pulling stream.
|
||||
if (pq_.empty()) {
|
||||
// Finish if there is nothing to pull
|
||||
if (!input_cursor_->Pull(frame, context)) return false;
|
||||
|
||||
const auto &vertex_value = frame[self_.input_symbol_];
|
||||
if (vertex_value.IsNull()) continue;
|
||||
|
||||
start_vertex = vertex_value.ValueVertex();
|
||||
if (self_.common_.existing_node) {
|
||||
const auto &node = frame[self_.common_.node_symbol];
|
||||
// Due to optional matching the existing node could be null.
|
||||
// Skip expansion for such nodes.
|
||||
if (node.IsNull()) continue;
|
||||
}
|
||||
|
||||
// Clear existing data structures.
|
||||
visited_cost_.clear();
|
||||
expanded_.clear();
|
||||
next_edges_.clear();
|
||||
traversal_stack_.clear();
|
||||
|
||||
pq_.push({TypedValue(), 0, *start_vertex, std::nullopt});
|
||||
visited_cost_.emplace(*start_vertex, 0);
|
||||
frame[self_.common_.edge_symbol] = TypedValue::TVector(memory);
|
||||
}
|
||||
|
||||
// Create a DFS traversal tree from the start node
|
||||
while (!pq_.empty()) {
|
||||
if (MustAbort(context)) throw HintedAbortError();
|
||||
|
||||
auto [current_weight, current_depth, current_vertex, maybe_directed_edge] = pq_.top();
|
||||
pq_.pop();
|
||||
|
||||
// Expand only if what we've just expanded is less than max depth.
|
||||
if (current_depth < upper_bound_) {
|
||||
if (maybe_directed_edge) {
|
||||
auto &[current_edge, direction, weight] = *maybe_directed_edge;
|
||||
if (expanded_.find(current_edge) != expanded_.end()) continue;
|
||||
expanded_.emplace(current_edge);
|
||||
}
|
||||
expand_from_vertex(current_vertex, current_weight, current_depth);
|
||||
}
|
||||
|
||||
// If the current vertex is not the starting vertex, maybe_directed_edge will not be nullopt
|
||||
if (maybe_directed_edge) {
|
||||
auto &[current_edge, direction, weight] = *maybe_directed_edge;
|
||||
// Searching for a previous vertex in the expansion
|
||||
auto prev_vertex = direction == EdgeAtom::Direction::IN ? current_edge.To() : current_edge.From();
|
||||
|
||||
// Update the parent
|
||||
if (next_edges_.find({prev_vertex, current_depth - 1}) == next_edges_.end()) {
|
||||
utils::pmr::list<DirectedEdge> empty(memory);
|
||||
next_edges_[{prev_vertex, current_depth - 1}] = std::move(empty);
|
||||
}
|
||||
|
||||
next_edges_.at({prev_vertex, current_depth - 1}).emplace_back(*maybe_directed_edge);
|
||||
}
|
||||
}
|
||||
|
||||
if (start_vertex && next_edges_.find({*start_vertex, 0}) != next_edges_.end()) {
|
||||
auto start_vertex_edges = next_edges_[{*start_vertex, 0}];
|
||||
traversal_stack_.emplace_back(std::move(start_vertex_edges));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Shutdown() override { input_cursor_->Shutdown(); }
|
||||
|
||||
void Reset() override {
|
||||
input_cursor_->Reset();
|
||||
visited_cost_.clear();
|
||||
expanded_.clear();
|
||||
next_edges_.clear();
|
||||
traversal_stack_.clear();
|
||||
ClearQueue();
|
||||
}
|
||||
|
||||
private:
|
||||
const ExpandVariable &self_;
|
||||
const UniqueCursorPtr input_cursor_;
|
||||
|
||||
// Upper bound on the path length.
|
||||
int64_t upper_bound_{-1};
|
||||
|
||||
struct AspStateHash {
|
||||
size_t operator()(const std::pair<VertexAccessor, int64_t> &key) const {
|
||||
return utils::HashCombine<VertexAccessor, int64_t>{}(key.first, key.second);
|
||||
}
|
||||
};
|
||||
|
||||
using DirectedEdge = std::tuple<EdgeAccessor, EdgeAtom::Direction, TypedValue>;
|
||||
using NextEdgesState = std::pair<VertexAccessor, int64_t>;
|
||||
// Maps vertices to minimum weights they got in expansion.
|
||||
utils::pmr::unordered_map<VertexAccessor, TypedValue> visited_cost_;
|
||||
// Marking the expanded edges to prevent multiple visits.
|
||||
utils::pmr::unordered_set<EdgeAccessor> expanded_;
|
||||
// Maps a (vertex, depth) state to the edges that expand the path from that vertex.
|
||||
utils::pmr::unordered_map<NextEdgesState, utils::pmr::list<DirectedEdge>, AspStateHash> next_edges_;
|
||||
// Stack indicating the traversal level.
|
||||
utils::pmr::list<utils::pmr::list<DirectedEdge>> traversal_stack_;
|
||||
|
||||
static void ValidateWeightTypes(const TypedValue &lhs, const TypedValue &rhs) {
|
||||
if (!((lhs.IsNumeric() && rhs.IsNumeric()) || (lhs.IsDuration() && rhs.IsDuration()))) {
|
||||
throw QueryRuntimeException(utils::MessageWithLink(
|
||||
"All weights should be of the same type, either numeric or a Duration. Please update the weight "
|
||||
"expression or the filter expression.",
|
||||
"https://memgr.ph/wsp"));
|
||||
}
|
||||
}
|
||||
|
||||
// Priority queue comparator. Keep lowest weight on top of the queue.
|
||||
class PriorityQueueComparator {
|
||||
public:
|
||||
bool operator()(const std::tuple<TypedValue, int64_t, VertexAccessor, std::optional<DirectedEdge>> &lhs,
|
||||
const std::tuple<TypedValue, int64_t, VertexAccessor, std::optional<DirectedEdge>> &rhs) {
|
||||
const auto &lhs_weight = std::get<0>(lhs);
|
||||
const auto &rhs_weight = std::get<0>(rhs);
|
||||
// Null defines minimum value for all types
|
||||
if (lhs_weight.IsNull()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (rhs_weight.IsNull()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
ValidateWeightTypes(lhs_weight, rhs_weight);
|
||||
return (lhs_weight > rhs_weight).ValueBool();
|
||||
}
|
||||
};
|
||||
|
||||
// Priority queue - core element of the algorithm.
|
||||
// Stores: {weight, depth, next vertex, edge and direction}
|
||||
std::priority_queue<std::tuple<TypedValue, int64_t, VertexAccessor, std::optional<DirectedEdge>>,
|
||||
utils::pmr::vector<std::tuple<TypedValue, int64_t, VertexAccessor, std::optional<DirectedEdge>>>,
|
||||
PriorityQueueComparator>
|
||||
pq_;
|
||||
|
||||
void ClearQueue() {
|
||||
while (!pq_.empty()) pq_.pop();
|
||||
}
|
||||
};
|
||||
|
||||
UniqueCursorPtr ExpandVariable::MakeCursor(utils::MemoryResource *mem) const {
|
||||
EventCounter::IncrementCounter(EventCounter::ExpandVariableOperator);
|
||||
|
||||
@ -1811,6 +2116,8 @@ UniqueCursorPtr ExpandVariable::MakeCursor(utils::MemoryResource *mem) const {
|
||||
return MakeUniqueCursorPtr<ExpandVariableCursor>(mem, *this, mem);
|
||||
case EdgeAtom::Type::WEIGHTED_SHORTEST_PATH:
|
||||
return MakeUniqueCursorPtr<ExpandWeightedShortestPathCursor>(mem, *this, mem);
|
||||
case EdgeAtom::Type::ALL_SHORTEST_PATHS:
|
||||
return MakeUniqueCursorPtr<ExpandAllShortestPathsCursor>(mem, *this, mem);
|
||||
case EdgeAtom::Type::SINGLE:
|
||||
LOG_FATAL("ExpandVariable should not be planned for a single expansion!");
|
||||
}
|
||||
|
@ -1082,6 +1082,7 @@ pulled.")
|
||||
// that should be inaccessible (private class function won't compile)
|
||||
friend class ExpandVariableCursor;
|
||||
friend class ExpandWeightedShortestPathCursor;
|
||||
friend class ExpandAllShortestPathsCursor;
|
||||
cpp<#)
|
||||
(:serialize (:slk))
|
||||
(:clone))
|
||||
|
@ -65,7 +65,7 @@ std::vector<Expansion> NormalizePatterns(const SymbolTable &symbol_table, const
|
||||
// Remove symbols which are bound by lambda arguments.
|
||||
collector.symbols_.erase(symbol_table.at(*edge->filter_lambda_.inner_edge));
|
||||
collector.symbols_.erase(symbol_table.at(*edge->filter_lambda_.inner_node));
|
||||
if (edge->type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH) {
|
||||
if (edge->type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH || edge->type_ == EdgeAtom::Type::ALL_SHORTEST_PATHS) {
|
||||
collector.symbols_.erase(symbol_table.at(*edge->weight_lambda_.inner_edge));
|
||||
collector.symbols_.erase(symbol_table.at(*edge->weight_lambda_.inner_node));
|
||||
}
|
||||
|
@ -120,6 +120,9 @@ bool PlanPrinter::PreVisit(query::plan::ExpandVariable &op) {
|
||||
case Type::WEIGHTED_SHORTEST_PATH:
|
||||
*out_ << "WeightedShortestPath";
|
||||
break;
|
||||
case Type::ALL_SHORTEST_PATHS:
|
||||
*out_ << "AllShortestPaths";
|
||||
break;
|
||||
case Type::SINGLE:
|
||||
LOG_FATAL("Unexpected ExpandVariable::type_");
|
||||
}
|
||||
@ -308,6 +311,8 @@ std::string ToString(EdgeAtom::Type type) {
|
||||
return "dfs";
|
||||
case EdgeAtom::Type::WEIGHTED_SHORTEST_PATH:
|
||||
return "wsp";
|
||||
case EdgeAtom::Type::ALL_SHORTEST_PATHS:
|
||||
return "asp";
|
||||
case EdgeAtom::Type::SINGLE:
|
||||
return "single";
|
||||
}
|
||||
@ -548,7 +553,7 @@ bool PlanToJsonVisitor::PreVisit(ExpandVariable &op) {
|
||||
|
||||
self["filter_lambda"] = op.filter_lambda_.expression ? ToJson(op.filter_lambda_.expression) : json();
|
||||
|
||||
if (op.type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH) {
|
||||
if (op.type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH || op.type_ == EdgeAtom::Type::ALL_SHORTEST_PATHS) {
|
||||
self["weight_lambda"] = ToJson(op.weight_lambda_->expression);
|
||||
self["total_weight_symbol"] = ToJson(*op.total_weight_);
|
||||
}
|
||||
|
@ -418,7 +418,7 @@ class RuleBasedPlanner {
|
||||
std::optional<ExpansionLambda> weight_lambda;
|
||||
std::optional<Symbol> total_weight;
|
||||
|
||||
if (edge->type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH) {
|
||||
if (edge->type_ == EdgeAtom::Type::WEIGHTED_SHORTEST_PATH || edge->type_ == EdgeAtom::Type::ALL_SHORTEST_PATHS) {
|
||||
weight_lambda.emplace(ExpansionLambda{symbol_table.at(*edge->weight_lambda_.inner_edge),
|
||||
symbol_table.at(*edge->weight_lambda_.inner_node),
|
||||
edge->weight_lambda_.expression});
|
||||
|
@ -0,0 +1,171 @@
|
||||
Feature: All Shortest Path
|
||||
|
||||
Scenario: Test match allShortest upper bound
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: 1}]->({a:'1'})-[:r {w: 1}]->({a:'2'}), (n)-[:r {w: 1}]->({a:'3'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH (n {a:'0'})-[le *allShortest 1 (e, n | e.w ) w]->(m) RETURN m.a
|
||||
"""
|
||||
Then the result should be:
|
||||
| m.a |
|
||||
| '1' |
|
||||
| '3' |
|
||||
|
||||
Scenario: Test match allShortest filtered
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: 1}]->({a:'1'})-[:r {w: 1}]->({a:'2'}), (n)-[:r {w: 1}]->({a:'3'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH (n {a:'0'})-[le *allShortest 1 (e, n | e.w ) w (e, n | n.a = '3')]->(m) RETURN m.a
|
||||
"""
|
||||
Then the result should be:
|
||||
| m.a |
|
||||
| '3' |
|
||||
|
||||
Scenario: Test match allShortest resulting edge list
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: 1}]->({a:'1'})-[:r {w: 2}]->({a:'2'}), (n)-[:r {w: 4}]->({a:'3'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH (n {a:'0'})-[le *allShortest 10 (e, n | e.w ) w]->(m) RETURN m.a, size(le) as s, w
|
||||
"""
|
||||
Then the result should be:
|
||||
| m.a | s | w |
|
||||
| '1' | 1 | 1 |
|
||||
| '2' | 2 | 3 |
|
||||
| '3' | 1 | 4 |
|
||||
|
||||
Scenario: Test match allShortest single edge type filtered
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r0 {w: 1}]->({a:'1'})-[:r {w: 2}]->({a:'2'}), (n)-[:r {w: 3}]->({a:'4'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH ()-[le:r0 *allShortest 10 (e, n | e.w) w]->(m)
|
||||
RETURN size(le) AS s, m.a
|
||||
"""
|
||||
Then the result should be:
|
||||
| s | m.a |
|
||||
| 1 | '1' |
|
||||
|
||||
Scenario: Test match allShortest multiple edge types filtered
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r0 {w: 1}]->({a:'1'})-[:r1 {w: 2}]->({a:'2'}), (n)-[:r {w: 3}]->({a:'4'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH ()-[le :r0|:r1 *allShortest 10 (e, n | e.w) w]->(m) WHERE size(le) > 1
|
||||
RETURN size(le) AS s, (le[0]).w AS r0, (le[1]).w AS r1
|
||||
"""
|
||||
Then the result should be:
|
||||
| s | r0 | r1 |
|
||||
| 2 | 1 | 2 |
|
||||
|
||||
Scenario: Test match allShortest property filters
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: 1}]->({a:'1'})-[:r {w: 2}]->({a:'2'}), (n)-[:r {w: 3}]->({a:'4'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH ()-[le *allShortest 10 {w:1} (e, n | e.w ) total_weight]->(m)
|
||||
RETURN size(le) AS s, (le[0]).w AS r0
|
||||
"""
|
||||
Then the result should be:
|
||||
| s | r0 |
|
||||
| 1 | 1 |
|
||||
|
||||
Scenario: Test match allShortest weight not a number
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: 'not a number'}]->({a:'1'})-[:r {w: 2}]->({a:'2'}), (n)-[:r {w: 3}]->({a:'4'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH ()-[le *allShortest 10 (e, n | e.w ) total_weight]->(m)
|
||||
RETURN le, total_weight
|
||||
"""
|
||||
Then an error should be raised
|
||||
|
||||
Scenario: Test match allShortest negative weight
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: -1}]->({a:'1'})-[:r {w: 2}]->({a:'2'}), (n)-[:r {w: 3}]->({a:'4'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH ()-[le *allShortest 10 (e, n | e.w ) total_weight]->(m)
|
||||
RETURN le, total_weight
|
||||
"""
|
||||
Then an error should be raised
|
||||
|
||||
Scenario: Test match allShortest weight duration
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: DURATION('PT1S')}]->({a:'1'})-[:r {w: DURATION('PT2S')}]->({a:'2'}), (n)-[:r {w: DURATION('PT4S')}]->({a:'3'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH (n {a:'0'})-[le *allShortest 10 (e, n | e.w ) w]->(m) RETURN m.a, size(le) as s, w
|
||||
"""
|
||||
Then the result should be:
|
||||
| m.a | s | w |
|
||||
| '1' | 1 | PT1S |
|
||||
| '2' | 2 | PT3S |
|
||||
| '3' | 1 | PT4S |
|
||||
|
||||
Scenario: Test match allShortest weight negative duration
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: DURATION({seconds: -1})}]->({a:'1'})-[:r {w: DURATION('PT2S')}]->({a:'2'}), (n)-[:r {w: DURATION('PT4S')}]->({a:'3'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH (n {a:'0'})-[le *allShortest 10 (e, n | e.w ) w]->(m) RETURN m.a, size(le) as s, w
|
||||
"""
|
||||
Then an error should be raised
|
||||
|
||||
Scenario: Test match allShortest weight mixed numeric and duration as weights
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: 2}]->({a:'1'})-[:r {w: DURATION('PT2S')}]->({a:'2'}), (n)-[:r {w: DURATION('PT4S')}]->({a:'3'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH (n {a:'0'})-[le *allShortest 10 (e, n | e.w ) w]->(m) RETURN m.a, size(le) as s, w
|
||||
"""
|
||||
Then an error should be raised
|
||||
|
||||
Scenario: Test allShortest return both paths of same length
|
||||
Given an empty graph
|
||||
And having executed:
|
||||
"""
|
||||
CREATE (n {a:'0'})-[:r {w: 2}]->({a:'1'})-[:r {w: 3}]->({a:'2'}), (n)-[:r {w: 5}]->({a:'2'})
|
||||
"""
|
||||
When executing query:
|
||||
"""
|
||||
MATCH path=(n {a:'0'})-[r *allShortest (e, n | e.w ) w]->(m {a:'2'}) RETURN COUNT(path);
|
||||
"""
|
||||
Then the result should be:
|
||||
| COUNT(path) |
|
||||
| 2 |
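As a closing illustration (a sketch only, reusing the graph and syntax from the scenarios above): the upper bound, weight lambda, total-weight variable, and filter lambda can all be combined in a single pattern.

MATCH (n {a:'0'})-[le *allShortest 10 (e, n | e.w) total_weight (e, n | n.a <> '3')]->(m)
RETURN m.a, size(le) AS s, total_weight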
|
@ -45,13 +45,10 @@ class Dataset:
|
||||
variant = self.DEFAULT_VARIANT
|
||||
if variant not in self.VARIANTS:
|
||||
raise ValueError("Invalid test variant!")
|
||||
if (self.FILES and variant not in self.FILES) and \
|
||||
(self.URLS and variant not in self.URLS):
|
||||
raise ValueError("The variant doesn't have a defined URL or "
|
||||
"file path!")
|
||||
if (self.FILES and variant not in self.FILES) and (self.URLS and variant not in self.URLS):
|
||||
raise ValueError("The variant doesn't have a defined URL or " "file path!")
|
||||
if variant not in self.SIZES:
|
||||
raise ValueError("The variant doesn't have a defined dataset "
|
||||
"size!")
|
||||
raise ValueError("The variant doesn't have a defined dataset " "size!")
|
||||
self._variant = variant
|
||||
if self.FILES is not None:
|
||||
self._file = self.FILES.get(variant, None)
|
||||
@ -63,8 +60,7 @@ class Dataset:
|
||||
self._url = None
|
||||
self._size = self.SIZES[variant]
|
||||
if "vertices" not in self._size or "edges" not in self._size:
|
||||
raise ValueError("The size defined for this variant doesn't "
|
||||
"have the number of vertices and/or edges!")
|
||||
raise ValueError("The size defined for this variant doesn't " "have the number of vertices and/or edges!")
|
||||
self._num_vertices = self._size["vertices"]
|
||||
self._num_edges = self._size["edges"]
|
||||
|
||||
@ -76,8 +72,7 @@ class Dataset:
|
||||
cached_input, exists = directory.get_file("dataset.cypher")
|
||||
if not exists:
|
||||
print("Downloading dataset file:", self._url)
|
||||
downloaded_file = helpers.download_file(
|
||||
self._url, directory.get_path())
|
||||
downloaded_file = helpers.download_file(self._url, directory.get_path())
|
||||
print("Unpacking and caching file:", downloaded_file)
|
||||
helpers.unpack_and_move_file(downloaded_file, cached_input)
|
||||
print("Using cached dataset file:", cached_input)
|
||||
@ -137,18 +132,20 @@ class Pokec(Dataset):
|
||||
# Arango benchmarks
|
||||
|
||||
def benchmark__arango__single_vertex_read(self):
|
||||
return ("MATCH (n:User {id : $id}) RETURN n",
|
||||
{"id": self._get_random_vertex()})
|
||||
return ("MATCH (n:User {id : $id}) RETURN n", {"id": self._get_random_vertex()})
|
||||
|
||||
def benchmark__arango__single_vertex_write(self):
|
||||
return ("CREATE (n:UserTemp {id : $id}) RETURN n",
|
||||
{"id": random.randint(1, self._num_vertices * 10)})
|
||||
return (
|
||||
"CREATE (n:UserTemp {id : $id}) RETURN n",
|
||||
{"id": random.randint(1, self._num_vertices * 10)},
|
||||
)
|
||||
|
||||
def benchmark__arango__single_edge_write(self):
|
||||
vertex_from, vertex_to = self._get_random_from_to()
|
||||
return ("MATCH (n:User {id: $from}), (m:User {id: $to}) WITH n, m "
|
||||
"CREATE (n)-[e:Temp]->(m) RETURN e",
|
||||
{"from": vertex_from, "to": vertex_to})
|
||||
return (
|
||||
"MATCH (n:User {id: $from}), (m:User {id: $to}) WITH n, m " "CREATE (n)-[e:Temp]->(m) RETURN e",
|
||||
{"from": vertex_from, "to": vertex_to},
|
||||
)
|
||||
|
||||
def benchmark__arango__aggregate(self):
|
||||
return ("MATCH (n:User) RETURN n.age, COUNT(*)", {})
|
||||
@ -157,92 +154,112 @@ class Pokec(Dataset):
|
||||
return ("MATCH (n:User) WHERE n.age >= 18 RETURN n.age, COUNT(*)", {})
|
||||
|
||||
def benchmark__arango__expansion_1(self):
|
||||
return ("MATCH (s:User {id: $id})-->(n:User) "
|
||||
"RETURN n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->(n:User) " "RETURN n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__expansion_1_with_filter(self):
|
||||
return ("MATCH (s:User {id: $id})-->(n:User) "
|
||||
"WHERE n.age >= 18 "
|
||||
"RETURN n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->(n:User) " "WHERE n.age >= 18 " "RETURN n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__expansion_2(self):
|
||||
return ("MATCH (s:User {id: $id})-->()-->(n:User) "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->()-->(n:User) " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__expansion_2_with_filter(self):
|
||||
return ("MATCH (s:User {id: $id})-->()-->(n:User) "
|
||||
"WHERE n.age >= 18 "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->()-->(n:User) " "WHERE n.age >= 18 " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__expansion_3(self):
|
||||
return ("MATCH (s:User {id: $id})-->()-->()-->(n:User) "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->()-->()-->(n:User) " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__expansion_3_with_filter(self):
|
||||
return ("MATCH (s:User {id: $id})-->()-->()-->(n:User) "
|
||||
"WHERE n.age >= 18 "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->()-->()-->(n:User) " "WHERE n.age >= 18 " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__expansion_4(self):
|
||||
return ("MATCH (s:User {id: $id})-->()-->()-->()-->(n:User) "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->()-->()-->()-->(n:User) " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__expansion_4_with_filter(self):
|
||||
return ("MATCH (s:User {id: $id})-->()-->()-->()-->(n:User) "
|
||||
"WHERE n.age >= 18 "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-->()-->()-->()-->(n:User) " "WHERE n.age >= 18 " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__neighbours_2(self):
|
||||
return ("MATCH (s:User {id: $id})-[*1..2]->(n:User) "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-[*1..2]->(n:User) " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__neighbours_2_with_filter(self):
|
||||
return ("MATCH (s:User {id: $id})-[*1..2]->(n:User) "
|
||||
"WHERE n.age >= 18 "
|
||||
"RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-[*1..2]->(n:User) " "WHERE n.age >= 18 " "RETURN DISTINCT n.id",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__neighbours_2_with_data(self):
|
||||
return ("MATCH (s:User {id: $id})-[*1..2]->(n:User) "
|
||||
"RETURN DISTINCT n.id, n",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-[*1..2]->(n:User) " "RETURN DISTINCT n.id, n",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__neighbours_2_with_data_and_filter(self):
|
||||
return ("MATCH (s:User {id: $id})-[*1..2]->(n:User) "
|
||||
"WHERE n.age >= 18 "
|
||||
"RETURN DISTINCT n.id, n",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (s:User {id: $id})-[*1..2]->(n:User) " "WHERE n.age >= 18 " "RETURN DISTINCT n.id, n",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__arango__shortest_path(self):
|
||||
vertex_from, vertex_to = self._get_random_from_to()
|
||||
return ("MATCH (n:User {id: $from}), (m:User {id: $to}) WITH n, m "
|
||||
"MATCH p=(n)-[*bfs..15]->(m) "
|
||||
"RETURN extract(n in nodes(p) | n.id) AS path",
|
||||
{"from": vertex_from, "to": vertex_to})
|
||||
return (
|
||||
"MATCH (n:User {id: $from}), (m:User {id: $to}) WITH n, m "
|
||||
"MATCH p=(n)-[*bfs..15]->(m) "
|
||||
"RETURN extract(n in nodes(p) | n.id) AS path",
|
||||
{"from": vertex_from, "to": vertex_to},
|
||||
)
|
||||
|
||||
def benchmark__arango__shortest_path_with_filter(self):
|
||||
vertex_from, vertex_to = self._get_random_from_to()
|
||||
return ("MATCH (n:User {id: $from}), (m:User {id: $to}) WITH n, m "
|
||||
"MATCH p=(n)-[*bfs..15 (e, n | n.age >= 18)]->(m) "
|
||||
"RETURN extract(n in nodes(p) | n.id) AS path",
|
||||
{"from": vertex_from, "to": vertex_to})
|
||||
return (
|
||||
"MATCH (n:User {id: $from}), (m:User {id: $to}) WITH n, m "
|
||||
"MATCH p=(n)-[*bfs..15 (e, n | n.age >= 18)]->(m) "
|
||||
"RETURN extract(n in nodes(p) | n.id) AS path",
|
||||
{"from": vertex_from, "to": vertex_to},
|
||||
)
|
||||
|
||||
def benchmark__arango__allshortest_paths(self):
|
||||
vertex_from, vertex_to = self._get_random_from_to()
|
||||
return (
|
||||
"MATCH (n:User {id: $from}), (m:User {id: $to}) WITH n, m "
|
||||
"MATCH p=(n)-[*allshortest 2 (r, n | 1) total_weight]->(m) "
|
||||
"RETURN extract(n in nodes(p) | n.id) AS path",
|
||||
{"from": vertex_from, "to": vertex_to},
|
||||
)
|
||||
|
||||
# Our benchmark queries
|
||||
|
||||
def benchmark__create__edge(self):
|
||||
vertex_from, vertex_to = self._get_random_from_to()
|
||||
return ("MATCH (a:User {id: $from}), (b:User {id: $to}) "
|
||||
"CREATE (a)-[:TempEdge]->(b)",
|
||||
{"from": vertex_from, "to": vertex_to})
|
||||
return (
|
||||
"MATCH (a:User {id: $from}), (b:User {id: $to}) " "CREATE (a)-[:TempEdge]->(b)",
|
||||
{"from": vertex_from, "to": vertex_to},
|
||||
)
|
||||
|
||||
def benchmark__create__pattern(self):
|
||||
return ("CREATE ()-[:TempEdge]->()", {})
|
||||
@ -251,9 +268,12 @@ class Pokec(Dataset):
|
||||
return ("CREATE ()", {})
|
||||
|
||||
def benchmark__create__vertex_big(self):
|
||||
return ("CREATE (:L1:L2:L3:L4:L5:L6:L7 {p1: true, p2: 42, "
|
||||
"p3: \"Here is some text that is not extremely short\", "
|
||||
"p4:\"Short text\", p5: 234.434, p6: 11.11, p7: false})", {})
|
||||
return (
|
||||
"CREATE (:L1:L2:L3:L4:L5:L6:L7 {p1: true, p2: 42, "
|
||||
'p3: "Here is some text that is not extremely short", '
|
||||
'p4:"Short text", p5: 234.434, p6: 11.11, p7: false})',
|
||||
{},
|
||||
)
|
||||
|
||||
def benchmark__aggregation__count(self):
|
||||
return ("MATCH (n) RETURN count(n), count(n.age)", {})
|
||||
@ -262,29 +282,31 @@ class Pokec(Dataset):
|
||||
return ("MATCH (n) RETURN min(n.age), max(n.age), avg(n.age)", {})
|
||||
|
||||
def benchmark__match__pattern_cycle(self):
|
||||
return ("MATCH (n:User {id: $id})-[e1]->(m)-[e2]->(n) "
|
||||
"RETURN e1, m, e2",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (n:User {id: $id})-[e1]->(m)-[e2]->(n) " "RETURN e1, m, e2",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__match__pattern_long(self):
|
||||
return ("MATCH (n1:User {id: $id})-[e1]->(n2)-[e2]->"
|
||||
"(n3)-[e3]->(n4)<-[e4]-(n5) "
|
||||
"RETURN n5 LIMIT 1",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (n1:User {id: $id})-[e1]->(n2)-[e2]->" "(n3)-[e3]->(n4)<-[e4]-(n5) " "RETURN n5 LIMIT 1",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__match__pattern_short(self):
|
||||
return ("MATCH (n:User {id: $id})-[e]->(m) "
|
||||
"RETURN m LIMIT 1",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (n:User {id: $id})-[e]->(m) " "RETURN m LIMIT 1",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__match__vertex_on_label_property(self):
|
||||
return ("MATCH (n:User) WITH n WHERE n.id = $id RETURN n",
|
||||
{"id": self._get_random_vertex()})
|
||||
return (
|
||||
"MATCH (n:User) WITH n WHERE n.id = $id RETURN n",
|
||||
{"id": self._get_random_vertex()},
|
||||
)
|
||||
|
||||
def benchmark__match__vertex_on_label_property_index(self):
|
||||
return ("MATCH (n:User {id: $id}) RETURN n",
|
||||
{"id": self._get_random_vertex()})
|
||||
return ("MATCH (n:User {id: $id}) RETURN n", {"id": self._get_random_vertex()})
|
||||
|
||||
def benchmark__match__vertex_on_property(self):
|
||||
return ("MATCH (n {id: $id}) RETURN n",
|
||||
{"id": self._get_random_vertex()})
|
||||
return ("MATCH (n {id: $id}) RETURN n", {"id": self._get_random_vertex()})
|
||||
|
@ -40,8 +40,7 @@ def _convert_args_to_flags(*args, **kwargs):
|
||||
def _get_usage(pid):
|
||||
total_cpu = 0
|
||||
with open("/proc/{}/stat".format(pid)) as f:
|
||||
total_cpu = (sum(map(int, f.read().split(")")[1].split()[11:15])) /
|
||||
os.sysconf(os.sysconf_names["SC_CLK_TCK"]))
|
||||
total_cpu = sum(map(int, f.read().split(")")[1].split()[11:15])) / os.sysconf(os.sysconf_names["SC_CLK_TCK"])
|
||||
peak_rss = 0
|
||||
with open("/proc/{}/status".format(pid)) as f:
|
||||
for row in f:
|
||||
@ -60,10 +59,8 @@ class Memgraph:
|
||||
atexit.register(self._cleanup)
|
||||
|
||||
# Determine Memgraph version
|
||||
ret = subprocess.run([memgraph_binary, "--version"],
|
||||
stdout=subprocess.PIPE, check=True)
|
||||
version = re.search(r"[0-9]+\.[0-9]+\.[0-9]+",
|
||||
ret.stdout.decode("utf-8")).group(0)
|
||||
ret = subprocess.run([memgraph_binary, "--version"], stdout=subprocess.PIPE, check=True)
|
||||
version = re.search(r"[0-9]+\.[0-9]+\.[0-9]+", ret.stdout.decode("utf-8")).group(0)
|
||||
self._memgraph_version = tuple(map(int, version.split(".")))
|
||||
|
||||
def __del__(self):
|
||||
@ -79,8 +76,7 @@ class Memgraph:
|
||||
if self._memgraph_version >= (0, 50, 0):
|
||||
kwargs["storage_properties_on_edges"] = self._properties_on_edges
|
||||
else:
|
||||
assert self._properties_on_edges, \
|
||||
"Older versions of Memgraph can't disable properties on edges!"
|
||||
assert self._properties_on_edges, "Older versions of Memgraph can't disable properties on edges!"
|
||||
return _convert_args_to_flags(self._memgraph_binary, **kwargs)
|
||||
|
||||
def _start(self, **kwargs):
|
||||
@ -94,8 +90,7 @@ class Memgraph:
|
||||
raise Exception("The database process died prematurely!")
|
||||
wait_for_server(7687)
|
||||
ret = self._proc_mg.poll()
|
||||
assert ret is None, "The database process died prematurely " \
|
||||
"({})!".format(ret)
|
||||
assert ret is None, "The database process died prematurely " "({})!".format(ret)
|
||||
|
||||
def _cleanup(self):
|
||||
if self._proc_mg is None:
|
||||
@ -121,8 +116,7 @@ class Memgraph:
|
||||
|
||||
def stop(self):
|
||||
ret, usage = self._cleanup()
|
||||
assert ret == 0, "The database process exited with a non-zero " \
|
||||
"status ({})!".format(ret)
|
||||
assert ret == 0, "The database process exited with a non-zero " "status ({})!".format(ret)
|
||||
return usage
|
||||
|
||||
|
||||
@ -135,8 +129,7 @@ class Client:
|
||||
return _convert_args_to_flags(self._client_binary, **kwargs)
|
||||
|
||||
def execute(self, queries=None, file_path=None, num_workers=1):
|
||||
if (queries is None and file_path is None) or \
|
||||
(queries is not None and file_path is not None):
|
||||
if (queries is None and file_path is None) or (queries is not None and file_path is not None):
|
||||
raise ValueError("Either queries or input_path must be specified!")
|
||||
|
||||
# TODO: check `file_path.endswith(".json")` to support advanced
|
||||
@ -151,8 +144,8 @@ class Client:
|
||||
json.dump(query, f)
|
||||
f.write("\n")
|
||||
|
||||
args = self._get_args(input=file_path, num_workers=num_workers,
|
||||
queries_json=queries_json)
|
||||
args = self._get_args(input=file_path, num_workers=num_workers, queries_json=queries_json)
|
||||
ret = subprocess.run(args, stdout=subprocess.PIPE, check=True)
|
||||
data = ret.stdout.decode("utf-8").strip().split("\n")
|
||||
data = [x for x in data if not x.startswith("[")]
|
||||
return list(map(json.loads, data))
|
||||
|
@ -2030,6 +2030,359 @@ TEST_F(QueryPlanExpandWeightedShortestPath, NegativeUpperBound) {
|
||||
EXPECT_THROW(ExpandWShortest(EdgeAtom::Direction::BOTH, -1, LITERAL(true)), QueryRuntimeException);
|
||||
}
|
||||
|
||||
/** A test fixture for all shortest paths expansion */
|
||||
class QueryPlanExpandAllShortestPaths : public testing::Test {
|
||||
public:
|
||||
struct ResultType {
|
||||
std::vector<memgraph::query::EdgeAccessor> path;
|
||||
memgraph::query::VertexAccessor vertex;
|
||||
double total_weight;
|
||||
};
|
||||
|
||||
protected:
|
||||
memgraph::storage::Storage db;
|
||||
memgraph::storage::Storage::Accessor storage_dba{db.Access()};
|
||||
memgraph::query::DbAccessor dba{&storage_dba};
|
||||
std::pair<std::string, memgraph::storage::PropertyId> prop = PROPERTY_PAIR("property");
|
||||
memgraph::storage::EdgeTypeId edge_type = dba.NameToEdgeType("edge_type");
|
||||
|
||||
// make 5 vertices because we'll need to compare against them exactly
|
||||
// v[0] has `prop` with the value 0
|
||||
std::vector<memgraph::query::VertexAccessor> v;
|
||||
|
||||
// make some edges too, in a map keyed by (from, to) vertex indices
|
||||
std::unordered_map<std::pair<int, int>, memgraph::query::EdgeAccessor> e;
|
||||
|
||||
AstStorage storage;
|
||||
SymbolTable symbol_table;
|
||||
|
||||
// inner edge and vertex symbols
|
||||
Symbol filter_edge = symbol_table.CreateSymbol("f_edge", true);
|
||||
Symbol filter_node = symbol_table.CreateSymbol("f_node", true);
|
||||
|
||||
Symbol weight_edge = symbol_table.CreateSymbol("w_edge", true);
|
||||
Symbol weight_node = symbol_table.CreateSymbol("w_node", true);
|
||||
|
||||
Symbol total_weight = symbol_table.CreateSymbol("total_weight", true);
|
||||
|
||||
void SetUp() {
|
||||
for (int i = 0; i < 5; i++) {
|
||||
v.push_back(dba.InsertVertex());
|
||||
ASSERT_TRUE(v.back().SetProperty(prop.second, memgraph::storage::PropertyValue(i)).HasValue());
|
||||
}
|
||||
|
||||
auto add_edge = [&](int from, int to, double weight) {
|
||||
auto edge = dba.InsertEdge(&v[from], &v[to], edge_type);
|
||||
ASSERT_TRUE(edge->SetProperty(prop.second, memgraph::storage::PropertyValue(weight)).HasValue());
|
||||
e.emplace(std::make_pair(from, to), *edge);
|
||||
};
|
||||
|
||||
add_edge(0, 1, 5);
|
||||
add_edge(1, 4, 5);
|
||||
add_edge(0, 2, 3);
|
||||
add_edge(2, 3, 3);
|
||||
add_edge(3, 4, 3);
|
||||
add_edge(4, 0, 12);
|
||||
|
||||
dba.AdvanceCommand();
|
||||
}
|
||||
|
||||
// defines and performs an all shortest paths expansion with the given
|
||||
// params. Returns a vector of results; each result holds (vector-of-edges,
|
||||
// vertex, total-weight).
|
||||
auto ExpandAllShortest(EdgeAtom::Direction direction, std::optional<int> max_depth, Expression *where,
|
||||
std::optional<int> node_id = 0, ScanAllTuple *existing_node_input = nullptr) {
|
||||
// scan the nodes optionally filtering on property value
|
||||
auto n = MakeScanAll(storage, symbol_table, "n", existing_node_input ? existing_node_input->op_ : nullptr);
|
||||
auto last_op = n.op_;
|
||||
if (node_id) {
|
||||
last_op = std::make_shared<Filter>(last_op, EQ(PROPERTY_LOOKUP(n.node_->identifier_, prop), LITERAL(*node_id)));
|
||||
}
|
||||
|
||||
auto ident_e = IDENT("e");
|
||||
ident_e->MapTo(weight_edge);
|
||||
|
||||
// expand allshortest
|
||||
auto node_sym = existing_node_input ? existing_node_input->sym_ : symbol_table.CreateSymbol("node", true);
|
||||
auto edge_list_sym = symbol_table.CreateSymbol("edgelist_", true);
|
||||
auto filter_lambda = last_op = std::make_shared<ExpandVariable>(
|
||||
last_op, n.sym_, node_sym, edge_list_sym, EdgeAtom::Type::ALL_SHORTEST_PATHS, direction,
|
||||
std::vector<memgraph::storage::EdgeTypeId>{}, false, nullptr, max_depth ? LITERAL(max_depth.value()) : nullptr,
|
||||
existing_node_input != nullptr, ExpansionLambda{filter_edge, filter_node, where},
|
||||
ExpansionLambda{weight_edge, weight_node, PROPERTY_LOOKUP(ident_e, prop)}, total_weight);
|
||||
|
||||
Frame frame(symbol_table.max_position());
|
||||
auto cursor = last_op->MakeCursor(memgraph::utils::NewDeleteResource());
|
||||
std::vector<ResultType> results;
|
||||
auto context = MakeContext(storage, symbol_table, &dba);
|
||||
while (cursor->Pull(frame, context)) {
|
||||
results.push_back(ResultType{std::vector<memgraph::query::EdgeAccessor>(), frame[node_sym].ValueVertex(),
|
||||
frame[total_weight].ValueDouble()});
|
||||
for (const TypedValue &edge : frame[edge_list_sym].ValueList())
|
||||
results.back().path.emplace_back(edge.ValueEdge());
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
template <typename TAccessor>
|
||||
auto GetProp(const TAccessor &accessor) {
|
||||
return accessor.GetProperty(memgraph::storage::View::OLD, prop.second)->ValueInt();
|
||||
}
|
||||
|
||||
template <typename TAccessor>
|
||||
auto GetDoubleProp(const TAccessor &accessor) {
|
||||
return accessor.GetProperty(memgraph::storage::View::OLD, prop.second)->ValueDouble();
|
||||
}
|
||||
|
||||
Expression *PropNe(Symbol symbol, int value) {
|
||||
auto ident = IDENT("inner_element");
|
||||
ident->MapTo(symbol);
|
||||
return NEQ(PROPERTY_LOOKUP(ident, prop), LITERAL(value));
|
||||
}
|
||||
};
|
||||
|
||||
bool compareResultType(const QueryPlanExpandAllShortestPaths::ResultType &a,
|
||||
const QueryPlanExpandAllShortestPaths::ResultType &b) {
|
||||
return a.total_weight < b.total_weight;
|
||||
}
|
||||
|
||||
// Testing all shortest paths on this graph:
//
//        5            5
//       /-->--[1]-->--\
//      /               \
//     /        12       \           2
//   [0]--------<--------[4]------->-------[5]
//     \                 /           (on some tests only)
//      \               /
//       \->[2]->-[3]->/
//        3       3       3
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, Basic) {
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, 1000, LITERAL(true));
|
||||
sort(results.begin(), results.end(), compareResultType);
|
||||
|
||||
ASSERT_EQ(results.size(), 4);
|
||||
|
||||
// check end nodes
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 2);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 1);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 3);
|
||||
EXPECT_EQ(GetProp(results[3].vertex), 4);
|
||||
|
||||
// check paths and total weights
|
||||
EXPECT_EQ(results[0].path.size(), 1);
|
||||
EXPECT_EQ(GetDoubleProp(results[0].path[0]), 3);
|
||||
EXPECT_EQ(results[0].total_weight, 3);
|
||||
|
||||
EXPECT_EQ(results[1].path.size(), 1);
|
||||
EXPECT_EQ(GetDoubleProp(results[1].path[0]), 5);
|
||||
EXPECT_EQ(results[1].total_weight, 5);
|
||||
|
||||
EXPECT_EQ(results[2].path.size(), 2);
|
||||
EXPECT_EQ(GetDoubleProp(results[2].path[0]), 3);
|
||||
EXPECT_EQ(GetDoubleProp(results[2].path[1]), 3);
|
||||
EXPECT_EQ(results[2].total_weight, 6);
|
||||
|
||||
EXPECT_EQ(results[3].path.size(), 3);
|
||||
EXPECT_EQ(GetDoubleProp(results[3].path[0]), 3);
|
||||
EXPECT_EQ(GetDoubleProp(results[3].path[1]), 3);
|
||||
EXPECT_EQ(GetDoubleProp(results[3].path[2]), 3);
|
||||
EXPECT_EQ(results[3].total_weight, 9);
|
||||
}
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, EdgeDirection) {
|
||||
{
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::OUT, 1000, LITERAL(true));
|
||||
sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 4);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 2);
|
||||
EXPECT_EQ(results[0].total_weight, 3);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 1);
|
||||
EXPECT_EQ(results[1].total_weight, 5);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 3);
|
||||
EXPECT_EQ(results[2].total_weight, 6);
|
||||
EXPECT_EQ(GetProp(results[3].vertex), 4);
|
||||
EXPECT_EQ(results[3].total_weight, 9);
|
||||
}
|
||||
{
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::IN, 1000, LITERAL(true));
|
||||
sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 4);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 4);
|
||||
EXPECT_EQ(results[0].total_weight, 12);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 3);
|
||||
EXPECT_EQ(results[1].total_weight, 15);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 1);
|
||||
EXPECT_EQ(results[2].total_weight, 17);
|
||||
EXPECT_EQ(GetProp(results[3].vertex), 2);
|
||||
EXPECT_EQ(results[3].total_weight, 18);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, Where) {
|
||||
{
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, 1000, PropNe(filter_node, 2));
|
||||
ASSERT_EQ(results.size(), 3);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 1);
|
||||
EXPECT_EQ(results[0].total_weight, 5);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 4);
|
||||
EXPECT_EQ(results[1].total_weight, 10);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 3);
|
||||
EXPECT_EQ(results[2].total_weight, 13);
|
||||
}
|
||||
{
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, 1000, PropNe(filter_node, 1));
|
||||
ASSERT_EQ(results.size(), 3);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 2);
|
||||
EXPECT_EQ(results[0].total_weight, 3);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 3);
|
||||
EXPECT_EQ(results[1].total_weight, 6);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 4);
|
||||
EXPECT_EQ(results[2].total_weight, 9);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, UpperBound) {
|
||||
{
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, std::nullopt, LITERAL(true));
|
||||
std::sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 4);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 2);
|
||||
EXPECT_EQ(results[0].total_weight, 3);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 1);
|
||||
EXPECT_EQ(results[1].total_weight, 5);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 3);
|
||||
EXPECT_EQ(results[2].total_weight, 6);
|
||||
EXPECT_EQ(GetProp(results[3].vertex), 4);
|
||||
EXPECT_EQ(results[3].total_weight, 9);
|
||||
}
|
||||
{
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, 2, LITERAL(true));
|
||||
std::sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 4);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 2);
|
||||
EXPECT_EQ(results[0].total_weight, 3);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 1);
|
||||
EXPECT_EQ(results[1].total_weight, 5);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 3);
|
||||
EXPECT_EQ(results[2].total_weight, 6);
|
||||
EXPECT_EQ(GetProp(results[3].vertex), 4);
|
||||
EXPECT_EQ(results[3].total_weight, 10);
|
||||
}
|
||||
{
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, 1, LITERAL(true));
|
||||
std::sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 3);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 2);
|
||||
EXPECT_EQ(results[0].total_weight, 3);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 1);
|
||||
EXPECT_EQ(results[1].total_weight, 5);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 4);
|
||||
EXPECT_EQ(results[2].total_weight, 12);
|
||||
}
|
||||
{
|
||||
auto new_vertex = dba.InsertVertex();
|
||||
ASSERT_TRUE(new_vertex.SetProperty(prop.second, memgraph::storage::PropertyValue(5)).HasValue());
|
||||
auto edge = dba.InsertEdge(&v[4], &new_vertex, edge_type);
|
||||
ASSERT_TRUE(edge.HasValue());
|
||||
ASSERT_TRUE(edge->SetProperty(prop.second, memgraph::storage::PropertyValue(2)).HasValue());
|
||||
dba.AdvanceCommand();
|
||||
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, 3, LITERAL(true));
|
||||
std::sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 5);
|
||||
EXPECT_EQ(GetProp(results[0].vertex), 2);
|
||||
EXPECT_EQ(results[0].total_weight, 3);
|
||||
EXPECT_EQ(GetProp(results[1].vertex), 1);
|
||||
EXPECT_EQ(results[1].total_weight, 5);
|
||||
EXPECT_EQ(GetProp(results[2].vertex), 3);
|
||||
EXPECT_EQ(results[2].total_weight, 6);
|
||||
EXPECT_EQ(GetProp(results[3].vertex), 4);
|
||||
EXPECT_EQ(results[3].total_weight, 9);
|
||||
EXPECT_EQ(GetProp(results[4].vertex), 5);
|
||||
EXPECT_EQ(results[4].total_weight, 12);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, NonNumericWeight) {
|
||||
auto new_vertex = dba.InsertVertex();
|
||||
ASSERT_TRUE(new_vertex.SetProperty(prop.second, memgraph::storage::PropertyValue(5)).HasValue());
|
||||
auto edge = dba.InsertEdge(&v[4], &new_vertex, edge_type);
|
||||
ASSERT_TRUE(edge.HasValue());
|
||||
ASSERT_TRUE(edge->SetProperty(prop.second, memgraph::storage::PropertyValue("not a number")).HasValue());
|
||||
dba.AdvanceCommand();
|
||||
EXPECT_THROW(ExpandAllShortest(EdgeAtom::Direction::BOTH, 1000, LITERAL(true)), QueryRuntimeException);
|
||||
}
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, NegativeWeight) {
|
||||
auto new_vertex = dba.InsertVertex();
|
||||
ASSERT_TRUE(new_vertex.SetProperty(prop.second, memgraph::storage::PropertyValue(5)).HasValue());
|
||||
auto edge = dba.InsertEdge(&v[4], &new_vertex, edge_type);
|
||||
ASSERT_TRUE(edge.HasValue());
|
||||
ASSERT_TRUE(edge->SetProperty(prop.second, memgraph::storage::PropertyValue(-10)).HasValue()); // negative weight
|
||||
dba.AdvanceCommand();
|
||||
EXPECT_THROW(ExpandAllShortest(EdgeAtom::Direction::BOTH, 1000, LITERAL(true)), QueryRuntimeException);
|
||||
}
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, NegativeUpperBound) {
|
||||
EXPECT_THROW(ExpandAllShortest(EdgeAtom::Direction::BOTH, -1, LITERAL(true)), QueryRuntimeException);
|
||||
}
|
||||
|
||||
// MultiplePaths testing on this graph:
//        5          5
//  [0]-->--[1]--->---[6]
//   |         \      /
//   \/ 3     5 \    / 1
//   |           >-[4]->
//   |          /
//  [2]-->--[3]->
//      3      3
|
||||
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, MultiplePaths) {
|
||||
auto new_vertex = dba.InsertVertex();
|
||||
ASSERT_TRUE(new_vertex.SetProperty(prop.second, memgraph::storage::PropertyValue(6)).HasValue());
|
||||
|
||||
auto edge = dba.InsertEdge(&v[4], &new_vertex, edge_type);
|
||||
ASSERT_TRUE(edge.HasValue());
|
||||
ASSERT_TRUE(edge->SetProperty(prop.second, memgraph::storage::PropertyValue(1)).HasValue());
|
||||
dba.AdvanceCommand();
|
||||
|
||||
auto edge2 = dba.InsertEdge(&v[1], &new_vertex, edge_type);
|
||||
ASSERT_TRUE(edge2.HasValue());
|
||||
ASSERT_TRUE(edge2->SetProperty(prop.second, memgraph::storage::PropertyValue(5)).HasValue());
|
||||
dba.AdvanceCommand();
|
||||
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::BOTH, 1000, LITERAL(true));
|
||||
std::sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 6);
|
||||
EXPECT_EQ(GetProp(results[4].vertex), 6);
|
||||
EXPECT_EQ(results[4].total_weight, 10);
|
||||
EXPECT_EQ(GetProp(results[5].vertex), 6);
|
||||
EXPECT_EQ(results[5].total_weight, 10);
|
||||
}
|
||||
|
||||
// Uses graph from Basic test, with double edge 2->-3 and 3->-4
|
||||
TEST_F(QueryPlanExpandAllShortestPaths, MultiEdge) {
|
||||
auto edge = dba.InsertEdge(&v[2], &v[3], edge_type);
|
||||
ASSERT_TRUE(edge.HasValue());
|
||||
ASSERT_TRUE(edge->SetProperty(prop.second, memgraph::storage::PropertyValue(3)).HasValue());
|
||||
dba.AdvanceCommand();
|
||||
|
||||
auto edge2 = dba.InsertEdge(&v[3], &v[4], edge_type);
|
||||
ASSERT_TRUE(edge2.HasValue());
|
||||
ASSERT_TRUE(edge2->SetProperty(prop.second, memgraph::storage::PropertyValue(3)).HasValue());
|
||||
dba.AdvanceCommand();
|
||||
|
||||
auto results = ExpandAllShortest(EdgeAtom::Direction::OUT, 1000, LITERAL(true));
|
||||
std::sort(results.begin(), results.end(), compareResultType);
|
||||
ASSERT_EQ(results.size(), 8);
|
||||
EXPECT_EQ(GetProp(results[6].vertex), 4);
|
||||
EXPECT_EQ(results[4].total_weight, 9);
|
||||
EXPECT_EQ(GetProp(results[7].vertex), 4);
|
||||
EXPECT_EQ(results[5].total_weight, 9);
|
||||
}
|
||||
|
||||
TEST_F(QueryPlanExpandWeightedShortestPath, FineGrainedFiltering) {
|
||||
// All edge_types and labels allowed
|
||||
{
|
||||
|