Traversal namespace specialization added for real DB primitives

Summary: database/GraphDbAccessor and storage/...Accessor now return iterators over lazily created Accessors (filtered on visibility). Dependencies and forward declarations were reorganized to support the new code.

Reviewers: buda, teon.banek

Reviewed By: buda

Subscribers: pullbot

Differential Revision: https://phabricator.memgraph.io/D83
florijan 2017-03-03 13:59:38 +01:00
parent 09cd425daf
commit 2aeef25881
18 changed files with 325 additions and 264 deletions
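
For context, a minimal usage sketch of the new lazily evaluated API (a hypothetical call site; GraphDbAccessor, VertexAccessor and the headers are the ones touched in the diffs below):

#include <vector>
#include "database/graph_db_accessor.hpp"
#include "storage/vertex_accessor.hpp"

// Sketch only: the returned iterable creates accessors on the fly,
// already filtered for visibility in the current transaction.
void example(GraphDbAccessor &dba) {
  int visible_count = 0;
  for (auto vertex : dba.vertices()) {
    (void)vertex;
    ++visible_count;
  }

  // When random access or multiple passes are needed, materialize explicitly
  // (this is what the hardcoded query below now does).
  auto iterable = dba.vertices();
  std::vector<VertexAccessor> materialized(iterable.begin(), iterable.end());
}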

View File

@ -248,6 +248,7 @@ include_directories(${build_include_dir})
include_directories(${fmt_source_dir})
include_directories(${yaml_include_dir})
include_directories(${GTEST_INCLUDE_DIRS} ${GMOCK_INCLUDE_DIRS})
include_directories(${CMAKE_SOURCE_DIR}/libs)
# -----------------------------------------------------------------------------
# openCypher parser -----------------------------------------------------------
@ -360,7 +361,7 @@ endif()
# -----------------------------------------------------------------------------
execute_process(
COMMAND python recursive_include.py --root ${src_dir} --start ${src_dir}/query/plan_interface.hpp --copy ${CMAKE_BINARY_DIR}/include
COMMAND ./recursive_include --roots ${src_dir} ${libs_dir} --start ${src_dir}/query/plan_interface.hpp --copy ${CMAKE_BINARY_DIR}/include
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/cmake
)

cmake/recursive_include (new executable file, 178 lines)
View File

@ -0,0 +1,178 @@
#!/usr/bin/env python
"""
A script for finding and [copying|printing] C++
headers that get recursively imported from one (or more)
starting points.

Supports absolute imports relative to some root folder
(project root) and relative imports relative to the
header that is doing the importing.

Does not support conditional imports (resulting from
#ifdef macros and such). All the #include statements
found in a header (one #include per line) are traversed.

Supports Python2 and Python3.
"""

__author__ = "Florijan Stamenkovic"
__copyright__ = "Copyright 2017, Memgraph"

import logging
import sys
import os
import re
import shutil
from argparse import ArgumentParser

# the prefix of an include directive
PREFIX = "#include"

log = logging.getLogger(__name__)


def parse_args():
    argp = ArgumentParser(description=__doc__)
    argp.add_argument("--logging", default="INFO", choices=["INFO", "DEBUG"],
                      help="Logging level")
    argp.add_argument("--roots", required=True, nargs="+",
                      help="One or more paths in which headers are sought")
    argp.add_argument("--start", required=True, nargs="+",
                      help="One or more headers from which to start scanning")
    argp.add_argument("--stdout", action="store_true",
                      help="If found paths should be printed out to stdout")
    argp.add_argument("--copy", default=None,
                      help="Prefix of the path where the headers should be copied")
    return argp.parse_args()


def main():
    args = parse_args()
    logging.basicConfig(level=args.logging)

    log.info("Recursively detecting used C/C++ headers in roots '%s' with starting point(s) '%s'",
             args.roots, args.start)

    args.roots = [os.path.abspath(p) for p in args.roots]

    results = set()
    for start in args.start:
        find_recursive(start, args.roots, results)

    results = list(sorted(results))
    log.debug("Found %d paths:", len(results))
    for r in results:
        log.debug("\t%s", r)

    # print out the results if required
    if args.stdout:
        for result in results:
            print(result)

    # copy the results if required
    if args.copy is not None:
        for root, path in results:
            from_path = os.path.join(root, path)
            to_path = os.path.join(args.copy, path)
            log.debug("Copying '%s' to '%s'", from_path, to_path)
            # create a directory if necessary, Py2 and Py3 compatible
            to_dir = os.path.dirname(to_path)
            if not os.path.exists(to_dir):
                os.makedirs(to_dir)
            shutil.copy(from_path, to_path)


def abs_to_relative(roots, path):
    """
    Args:
        roots: list of str, a list of possible prefixes
            to the 'path'.
        path: str, a path to a file.

    Return:
        A tuple (root, relative_path) where 'root' is one
        of the given 'roots' and where
        os.path.join(root, relative_path) equals the given
        'path'

    Raise:
        An exception if none of the 'roots' is a prefix of
        'path'
    """
    for root in roots:
        if path.startswith(root):
            return (root, path[len(root) + 1:])

    raise Exception("Failed to find prefix of '%s' in '%r'" % (
        path, roots))


def find_recursive(path, roots, results):
    """
    Recursively looks for headers and adds them to results.

    Results are added as tuples of form (root, header_path)
    where 'root' is one of the given roots: the one in which
    the header was found, and 'header_path' is the found
    header's path relative to 'root'.

    Args:
        path: str or tuple. If str, it's considered a path
            that has one of the roots as prefix. If tuple
            it's considered a (prefix, suffix) that defines
            a path.
            In both forms the path is to a header. This header is added
            to results and scanned for #include statements of
            other headers. For each #include (relative to current
            `path` or to one of the `roots`) for which a file is found
            this same function is called.
        roots: list of str, List of folders in which headers are
            sought. Must be absolute paths.
        results: a collection into which the results are
            added. The collection contains tuples of form
            (root, path), see function description.
    """
    log.debug("Processing path: %s", path)

    if isinstance(path, str):
        path = os.path.abspath(path)
        path = abs_to_relative(roots, path)

    # from this point onward 'path' is a tuple (root, suffix)
    if path in results:
        log.debug("Skipping already present path '%r'", path)
        return

    log.debug("Adding path '%r'", path)
    results.add(path)

    # go through files and look for include directives
    with open(os.path.join(*path)) as f:
        for line in filter(lambda l: l.startswith(PREFIX),
                           map(lambda l: l.strip(), f)):
            include = line[len(PREFIX):].strip()
            include = re.sub("[\"\'\>\<]", "", include)
            log.debug("Processing include '%s'", include)

            # search for the include relative to the current header
            include_rel = os.path.join(
                os.path.dirname(os.path.join(*path)), include)
            if os.path.exists(include_rel) and os.path.isfile(include_rel):
                find_recursive(include_rel, roots, results)
                continue

            # search for the file in the roots
            for root in roots:
                include_abs = os.path.join(root, include)
                if os.path.exists(include_abs) and os.path.isfile(include_abs):
                    find_recursive((root, include), roots, results)
                    continue


if __name__ == '__main__':
    main()

View File

@ -1,146 +0,0 @@
#!/usr/bin/env python
"""
A script for finding and [copying|printing] C++
headers that get recursively imported from one (or more)
starting points.

Supports absolute imports relative to some root folder
(project root) and relative imports relative to the
header that is doing the importing.

Does not support conditional imports (resulting from
#ifdef macros and such). All the #import statements
found in a header (one #import per line) are traversed.

Supports Python2 and Python3.
"""

__author__ = "Florijan Stamenkovic"
__copyright__ = "Copyright 2017, Memgraph"

import logging
import sys
import os
import re
import shutil
from argparse import ArgumentParser

# the prefix of an include directive
PREFIX = "#include"

log = logging.getLogger(__name__)


def parse_args():
    argp = ArgumentParser(description=__doc__)
    argp.add_argument("--logging", default="INFO", choices=["INFO", "DEBUG"],
                      help="Logging level")
    argp.add_argument("--root", required=True,
                      help="Root path of the header tree (project root)")
    argp.add_argument("--start", required=True, nargs="+",
                      help="One or more headers from which to start scanning")
    argp.add_argument("--stdout", action="store_true",
                      help="If found paths should be printed out to stdout")
    argp.add_argument("--copy", default=None,
                      help="Prefix of the path where the headers should be copied")
    return argp.parse_args()


def main():
    args = parse_args()
    logging.basicConfig(level=args.logging)

    log.info("Recursively detecting used C/C++ headers in root '%s' with starting point(s) '%s'",
             args.root, args.start)

    results = set()
    for start in args.start:
        find_recursive(start, args.root, results)

    results = list(sorted(results))
    log.debug("Found %d paths:", len(results))
    for r in results:
        log.debug("\t%s", r)

    # print out the results if required
    if args.stdout:
        for result in results:
            print(result)

    # copy the results if required
    if args.copy is not None:
        for result in results:
            from_path = os.path.join(args.root, result)
            to_path = os.path.join(args.copy, result)
            log.debug("Copying '%s' to '%s'", from_path, to_path)
            # create a directory if necessary, Py2 and Py3 compatible
            to_dir = os.path.dirname(to_path)
            if not os.path.exists(to_dir):
                os.makedirs(to_dir)
            shutil.copy(from_path, to_path)


def find_recursive(path, project_root, results):
    """
    Recursivelly looks for headers and adds them to results.
    The headers are added as paths relative to the given
    `project_root`

    Args:
        path: str, path to a header. This header is added
            to results and scanned for #include statements of
            other headers. For each #include (relative to current
            `path` or to `project_root`) for which a file is found
            this same function is called.
        project_root: str, path to a project root. Used for
            finding headers included using an absolute path
            (that is actually relative to project_root).
        results: a collection into which the results are
            added.
    """
    log.debug("Processing path: %s", path)
    path_abs = os.path.abspath(path)
    root_abs = os.path.abspath(project_root)
    if not path_abs.startswith(root_abs):
        log.warning("Project root '%s' not prefix of path '%s'",
                    root_abs, path_abs)

    path_rel = path_abs[len(root_abs) + 1:]
    log.debug("Rel path is '%s'", path_rel)

    if path_rel in results:
        log.debug("Skipping already present path '%s'", path_rel)
        return

    log.debug("Adding path '%s'", path_rel)
    results.add(path_rel)

    # go through files and look for include directives
    with open(path_abs) as f:
        for line in filter(lambda l: l.startswith(PREFIX),
                           map(lambda l: l.strip(), f)):
            include = line[len(PREFIX):].strip()
            include = re.sub("[\"\']", "", include)
            log.debug("Processing include '%s'", include)

            # check if the file exists relative to this file
            # or absolutely to project root
            include_abs = os.path.join(project_root, include)
            if os.path.exists(include_abs):
                find_recursive(include_abs, project_root, results)

            include_rel = os.path.join(os.path.dirname(path_abs), include)
            if os.path.exists(include_rel):
                find_recursive(include_rel, project_root, results)


if __name__ == '__main__':
    main()

View File

@ -23,7 +23,6 @@ option(YAML_CPP_BUILD_TOOLS "" OFF)
add_subdirectory(yaml-cpp)
# setup cppitertools
include_directories(cppitertools)
# CLion compatibility; the target won't be built
file(GLOB __CPPITERTOOLS_SOURCES __main.cpp
${CMAKE_SOURCE_DIR}/libs/cppitertools/*.hpp)

View File

@ -6,6 +6,7 @@
#include "io/network/socket.hpp"
#include "database/graph_db.hpp"
#include "database/graph_db_accessor.hpp"
#include "storage/property_value_store.hpp"
#include <cassert>

View File

@ -10,9 +10,7 @@
// forward declaring Edge and Vertex because they use
// GraphDb::Label etc., and therefore include this header
class Vertex;
class VertexAccessor;
class Edge;
class EdgeAccessor;
// TODO: Maybe split this in another layer between Db and Dbms. Where the new
// layer would hold SnapshotEngine and this kind of concept objects. Some

View File

@ -49,21 +49,6 @@ void GraphDbAccessor::detach_remove_vertex(VertexAccessor& vertex_accessor) {
vertex_accessor.vlist_.remove(&vertex_accessor.update(), transaction_);
}
std::vector<VertexAccessor> GraphDbAccessor::vertices() {
auto sl_accessor = db_.vertices_.access();
std::vector<VertexAccessor> accessors;
accessors.reserve(sl_accessor.size());
for (auto vlist : sl_accessor) {
auto record = vlist->find(transaction_);
if (record == nullptr) continue;
accessors.emplace_back(*vlist, *record, *this);
}
return accessors;
}
EdgeAccessor GraphDbAccessor::insert_edge(VertexAccessor& from,
VertexAccessor& to,
GraphDb::EdgeType edge_type) {
@ -105,21 +90,6 @@ void GraphDbAccessor::remove_edge(EdgeAccessor& edge_accessor) {
edge_accessor.vlist_.remove(&edge_accessor.update(), transaction_);
}
std::vector<EdgeAccessor> GraphDbAccessor::edges() {
auto sl_accessor = db_.edges_.access();
std::vector<EdgeAccessor> accessors;
accessors.reserve(sl_accessor.size());
for (auto vlist : sl_accessor) {
auto record = vlist->find(transaction_);
if (record == nullptr) continue;
accessors.emplace_back(*vlist, *record, *this);
}
return accessors;
}
GraphDb::Label GraphDbAccessor::label(const std::string& label_name) {
return &(*db_.labels_.access().insert(label_name).first);
}

View File

@ -5,9 +5,15 @@
#pragma once
#include "cppitertools/imap.hpp"
#include "cppitertools/filter.hpp"
#include "graph_db.hpp"
#include "transactions/transaction.hpp"
#include "storage/vertex_accessor.hpp"
#include "storage/edge_accessor.hpp"
/**
* An accessor for the database object: exposes functions
* for operating on the database. All the functions in
@ -56,11 +62,20 @@ class GraphDbAccessor {
void detach_remove_vertex(VertexAccessor& vertex_accessor);
/**
* Returns accessors to all the vertices in the graph.
* TODO: switch to the Iterator library and map function.
* @return
* Returns iterable over accessors to all the vertices in the graph
* visible to the current transaction.
*/
std::vector<VertexAccessor> vertices();
auto vertices() {
// filter out the accessors not visible to the current transaction
auto filtered = iter::filter([this](auto vlist) {
return vlist->find(this->transaction_) != nullptr;
}, db_.vertices_.access());
// return accessors of the filtered out vlists
return iter::imap([this](auto vlist) {
return VertexAccessor(*vlist, *this);
}, std::move(filtered));
}
/**
* Creates a new Edge and returns an accessor to it.
@ -81,11 +96,20 @@ class GraphDbAccessor {
void remove_edge(EdgeAccessor& edge_accessor);
/**
* Returns accessors to all the edges in the graph.
* TODO: switch to the Iterator library and map function.
* @return
* Returns iterable over accessors to all the edges in the graph
* visible to the current transaction.
*/
std::vector<EdgeAccessor> edges();
auto edges() {
// filter out the accessors not visible to the current transaction
auto filtered = iter::filter([this](auto vlist) {
return vlist->find(transaction_) != nullptr;
}, db_.edges_.access());
// return accessors of the filtered out vlists
return iter::imap([this](auto vlist) {
return EdgeAccessor(*vlist, *this);
}, std::move(filtered));
}
/**
* Obtains the Label for the label's name.

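The iter::filter / iter::imap composition above is the standard cppitertools idiom: both steps are lazy, so accessors are constructed only as the range is walked. A self-contained sketch of the same pattern on plain integers (illustrative only, not part of the change):

#include <iostream>
#include <vector>
#include "cppitertools/filter.hpp"
#include "cppitertools/imap.hpp"

int main() {
  std::vector<int> nums{1, 2, 3, 4, 5, 6};

  // Keep only the even values; nothing is evaluated yet.
  auto filtered = iter::filter([](int x) { return x % 2 == 0; }, nums);

  // Map the survivors (here: square them); still lazy.
  auto mapped = iter::imap([](int x) { return x * x; }, std::move(filtered));

  // Work happens only while traversing: prints 4 16 36.
  for (int x : mapped) std::cout << x << " ";
  std::cout << std::endl;
  return 0;
}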
View File

@ -1,4 +1,5 @@
#include "storage/record_accessor.hpp"
#include "database/graph_db_accessor.hpp"
#include "storage/edge.hpp"
#include "storage/vertex.hpp"
#include "utils/assert.hpp"

View File

@ -1,13 +1,15 @@
#pragma once
#include "database/graph_db.hpp"
#include "database/graph_db_accessor.hpp"
//#include "database/graph_db_accessor.hpp"
#include "mvcc/version_list.hpp"
#include "storage/property_value.hpp"
#include "utils/pass_key.hpp"
#include "storage/property_value_store.hpp"
class GraphDbAccessor;
/**
* An accessor to a database record (an Edge or a Vertex).
*

View File

@ -1,21 +1,20 @@
#pragma once
#include "cppitertools/imap.hpp"
/**
* Creates a vector of records accessors (Edge or Vertex).
* Creates an iterator over record accessors (Edge or Vertex).
*
* @tparam TAccessor The type of accessor to create a vector of.
* @tparam TCollection An iterable of pointers to version list objects.
* @tparam TAccessor The exact type of accessor.
* @tparam TIterable An iterable of pointers to version list objects.
*
* @param records An iterable of version list pointers for which accessors
* need to be created.
* @param db_accessor A database accessor to create the record accessors with.
*/
template <typename TAccessor, typename TCollection>
std::vector<TAccessor> make_accessors(const TCollection &records,
GraphDbAccessor &db_accessor) {
std::vector<TAccessor> accessors;
accessors.reserve(records.size());
for (auto record : records) accessors.emplace_back(*record, db_accessor);
return accessors;
template <typename TAccessor, typename TIterable>
auto make_accessor_iterator(const TIterable &records, GraphDbAccessor &db_accessor) {
return iter::imap([&db_accessor](auto vlist) {
return TAccessor(*vlist, db_accessor);
}, records);
}

View File

@ -36,11 +36,3 @@ bool VertexAccessor::has_label(GraphDb::Label label) const {
const std::vector<GraphDb::Label> &VertexAccessor::labels() const {
return this->view().labels_;
}
std::vector<EdgeAccessor> VertexAccessor::in() {
return make_accessors<EdgeAccessor>(view().in_, db_accessor_);
}
std::vector<EdgeAccessor> VertexAccessor::out() {
return make_accessors<EdgeAccessor>(view().out_, db_accessor_);
}

View File

@ -6,10 +6,9 @@
#include "database/graph_db.hpp"
#include "storage/record_accessor.hpp"
#include "storage/vertex.hpp"
#include "storage/util.hpp"
// forward declaring the EdgeAccessor because it's returned
// by some functions
class EdgeAccessor;
#include "storage/edge_accessor.hpp"
/**
* Provides ways for the client programmer (i.e. code generated
@ -64,13 +63,11 @@ class VertexAccessor : public RecordAccessor<Vertex> {
/**
* Returns EdgeAccessors for all incoming edges.
* @return
*/
std::vector<EdgeAccessor> in();
auto in() { return make_accessor_iterator<EdgeAccessor>(view().in_, db_accessor_); }
/**
* Returns EdgeAccessors for all outgoing edges.
* @return
*/
std::vector<EdgeAccessor> out();
auto out() { return make_accessor_iterator<EdgeAccessor>(view().out_, db_accessor_); }
};

View File

@ -0,0 +1,58 @@
//
// Copyright 2017 Memgraph
// Created by Florijan Stamenkovic on 02.03.17.
//
#pragma once
#include "enums.hpp"
#include "path.hpp"
#include "templates.hpp"
#include "storage/vertex_accessor.hpp"
#include "storage/edge_accessor.hpp"
/**
* A specialization of the "traversal" namespace that uses
* real DB VertexAccessor and EdgeAccessor classes.
*/
namespace traversal {
// expose Path and Paths as class template instantiations
using Path = traversal_template::Path<VertexAccessor, EdgeAccessor>;
using Paths = traversal_template::Paths<VertexAccessor, EdgeAccessor>;
/**
* Specialization of the traversal_template::Begin function.
*/
template<typename TCollection>
auto Begin(const TCollection &vertices, std::function<bool(const VertexAccessor &)> vertex_filter = {}) {
return traversal_template::Begin<TCollection, VertexAccessor, EdgeAccessor>(vertices, vertex_filter);
}
/**
* Specialization of the traversal_template::Cartesian function that accepts
* a single argument.
*/
template<typename TVisitable>
auto Cartesian(TVisitable &&visitable) {
return traversal_template::Cartesian<TVisitable, VertexAccessor, EdgeAccessor>(
std::forward<TVisitable>(visitable));
}
/**
* Specialization of the traversal_template::Cartesian function that accepts
* multiple arguments.
*/
template<typename TVisitableFirst, typename... TVisitableOthers>
auto Cartesian(TVisitableFirst &&first, TVisitableOthers &&... others) {
return traversal_template::CartesianBinaryType<TVisitableFirst, decltype(Cartesian(
std::forward<TVisitableOthers>(others)...)),
VertexAccessor, EdgeAccessor>(
std::forward<TVisitableFirst>(first),
Cartesian(std::forward<TVisitableOthers>(others)...)
);
}
}
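The variadic Cartesian overload above peels off the first visitable and recurses on the rest, so N arguments fold into nested binary CartesianBinaryType combinations. A dependency-free sketch of that recursion pattern, using std::pair in place of the traversal types (illustrative only):

#include <string>
#include <utility>

// Base case: a single value combines to itself.
template <typename T>
auto Combine(T &&value) {
  return std::forward<T>(value);
}

// Recursive case: pair the first value with the combination of the rest,
// mirroring how the variadic Cartesian nests binary combinations.
template <typename TFirst, typename... TOthers>
auto Combine(TFirst &&first, TOthers &&... others) {
  return std::make_pair(std::forward<TFirst>(first),
                        Combine(std::forward<TOthers>(others)...));
}

int main() {
  // Type is std::pair<int, std::pair<double, std::string>>.
  auto nested = Combine(1, 2.0, std::string("three"));
  (void)nested;
  return 0;
}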

View File

@ -124,8 +124,13 @@ bool run_general_query(GraphDbAccessor &db_accessor,
else
stream.write_fields({"a.garment_id", "b.garment_id", "c.garment_id",
"d.garment_id", "score"});
std::vector<VertexAccessor> vertices = db_accessor.vertices();
std::vector<EdgeAccessor> edges = db_accessor.edges();
// TODO dgleich: this code is very inefficient as it first makes a copy
// of all the vertices/edges, and filters afterwards. I warned about this
// happening in code review!!!
auto vertices_iterator = db_accessor.vertices();
auto edge_iterator = db_accessor.edges();
std::vector<VertexAccessor> vertices(vertices_iterator.begin(), vertices_iterator.end());
std::vector<EdgeAccessor> edges(edge_iterator.begin(), edge_iterator.end());
std::vector<VertexAccessor *> vertices_indexed;
std::vector<EdgeAccessor *> edges_indexed;
@ -180,7 +185,7 @@ bool run_general_query(GraphDbAccessor &db_accessor,
* @param edges edges from which to update bitset.
*/
auto update = [&db_accessor, &query](Bitset<int64_t> &bitset,
const std::vector<EdgeAccessor> &edges) {
auto &&edges) {
for (auto e : edges) {
if (e.edge_type() != db_accessor.edge_type("default_outfit")) continue;
const int from = query(e.from());

View File

@ -79,32 +79,32 @@ TEST(GraphDbAccessorTest, InsertEdge) {
auto va1 = dba.insert_vertex();
auto va2 = dba.insert_vertex();
EXPECT_EQ(va1.in().size(), 0);
EXPECT_EQ(va1.out().size(), 0);
EXPECT_EQ(va2.in().size(), 0);
EXPECT_EQ(va2.out().size(), 0);
EXPECT_EQ(va1.in_degree(), 0);
EXPECT_EQ(va1.out_degree(), 0);
EXPECT_EQ(va2.in_degree(), 0);
EXPECT_EQ(va2.out_degree(), 0);
// setup (v1) - [:likes] -> (v2)
dba.insert_edge(va1, va2, dba.edge_type("likes"));
EXPECT_EQ(CountEdges(dba), 1);
EXPECT_EQ(va1.out()[0].to(), va2);
EXPECT_EQ(va2.in()[0].from(), va1);
EXPECT_EQ(va1.in().size(), 0);
EXPECT_EQ(va1.out().size(), 1);
EXPECT_EQ(va2.in().size(), 1);
EXPECT_EQ(va2.out().size(), 0);
EXPECT_EQ(va1.out().begin()->to(), va2);
EXPECT_EQ(va2.in().begin()->from(), va1);
EXPECT_EQ(va1.in_degree(), 0);
EXPECT_EQ(va1.out_degree(), 1);
EXPECT_EQ(va2.in_degree(), 1);
EXPECT_EQ(va2.out_degree(), 0);
// setup (v1) - [:likes] -> (v2) <- [:hates] - (v3)
auto va3 = dba.insert_vertex();
dba.insert_edge(va3, va2, dba.edge_type("hates"));
EXPECT_EQ(CountEdges(dba), 2);
EXPECT_EQ(va3.out()[0].to(), va2);
EXPECT_EQ(va1.in().size(), 0);
EXPECT_EQ(va1.out().size(), 1);
EXPECT_EQ(va2.in().size(), 2);
EXPECT_EQ(va2.out().size(), 0);
EXPECT_EQ(va3.in().size(), 0);
EXPECT_EQ(va3.out().size(), 1);
EXPECT_EQ(va3.out().begin()->to(), va2);
EXPECT_EQ(va1.in_degree(), 0);
EXPECT_EQ(va1.out_degree(), 1);
EXPECT_EQ(va2.in_degree(), 2);
EXPECT_EQ(va2.out_degree(), 0);
EXPECT_EQ(va3.in_degree(), 0);
EXPECT_EQ(va3.out_degree(), 1);
}
TEST(GraphDbAccessorTest, RemoveEdge) {
@ -139,14 +139,14 @@ TEST(GraphDbAccessorTest, RemoveEdge) {
// ensure correct connectivity for all the vertices
for (auto vertex : dba3.vertices()) {
if (vertex == v1) {
EXPECT_EQ(vertex.in().size(), 0);
EXPECT_EQ(vertex.out().size(), 1);
EXPECT_EQ(vertex.in_degree(), 0);
EXPECT_EQ(vertex.out_degree(), 1);
} else if (vertex == v2) {
EXPECT_EQ(vertex.in().size(), 1);
EXPECT_EQ(vertex.out().size(), 0);
EXPECT_EQ(vertex.in_degree(), 1);
EXPECT_EQ(vertex.out_degree(), 0);
} else {
EXPECT_EQ(vertex.in().size(), 0);
EXPECT_EQ(vertex.out().size(), 0);
EXPECT_EQ(vertex.in_degree(), 0);
EXPECT_EQ(vertex.out_degree(), 0);
}
}
}
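Since the lazy iterables no longer expose size(), the tests above switch to in_degree()/out_degree(). Where a count of such a range is still needed (e.g. a helper in the spirit of the tests' CountEdges), a generic counter along these lines would do (a sketch; consumes the range once):

#include <cstddef>

// Sketch: count the elements of any lazily produced iterable, e.g. the
// result of dba.vertices() or vertex.out(). Single pass, O(n).
template <typename TIterable>
std::size_t Count(TIterable &&iterable) {
  std::size_t count = 0;
  for (auto it = iterable.begin(); it != iterable.end(); ++it) ++count;
  return count;
}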

View File

@ -74,23 +74,6 @@ TEST(RecordAccessor, RecordLessThan) {
EXPECT_TRUE(e1 < e2 || e2 < e1);
EXPECT_FALSE(e1 < e1);
EXPECT_FALSE(e2 < e2);
std::vector<VertexAccessor> vertices = dba.vertices();
std::vector<VertexAccessor*> A;
for (int i = 0; i < vertices.size(); ++i) A.push_back(&vertices[i]);
std::sort(A.begin(), A.end(),
[](const VertexAccessor* a, const VertexAccessor* b) -> bool {
return *a < *b;
});
std::vector<VertexAccessor*> B;
for (int i = 0; i < vertices.size(); ++i) B.push_back(&vertices[i]);
std::sort(B.begin(), B.end(),
[](const VertexAccessor* a, const VertexAccessor* b) -> bool {
return *a < *b;
});
for (int i = 0; i < A.size(); ++i) EXPECT_EQ(*A[i], *B[i]);
}
TEST(RecordAccessor, VertexLabels) {
@ -164,10 +147,10 @@ TEST(RecordAccessor, VertexEdgeConnections) {
EXPECT_EQ(edge.to(), v2);
EXPECT_NE(edge.to(), v1);
EXPECT_EQ(v1.in().size(), 0);
EXPECT_EQ(v1.out().size(), 1);
EXPECT_EQ(v2.in().size(), 1);
EXPECT_EQ(v2.out().size(), 0);
EXPECT_EQ(v1.in_degree(), 0);
EXPECT_EQ(v1.out_degree(), 1);
EXPECT_EQ(v2.in_degree(), 1);
EXPECT_EQ(v2.out_degree(), 0);
for (auto e : v1.out()) EXPECT_EQ(edge, e);

View File

@ -135,7 +135,6 @@ auto Cartesian(TVisitable &&visitable) {
* Specialization of the traversal_template::Cartesian function that accepts
* multiple arguments.
*/
// TODO (code-review) can this be specialized more elegantly?
template<typename TVisitableFirst, typename... TVisitableOthers>
auto Cartesian(TVisitableFirst &&first, TVisitableOthers &&... others) {
return traversal_template::CartesianBinaryType<TVisitableFirst, decltype(Cartesian(