Clean Memgraph repository (#12)

* Update documentation
* Update structure, CODEOWNERS, license files
* Update release/README
* Stop Actions if only docs files were updated
* Remove unused hidden files, move Doxygen logo

parent 98f83e0c88
commit 814bb66ea6
@@ -1,7 +0,0 @@
-{
-  "project_id" : "memgraph",
-  "conduit_uri" : "https://phabricator.memgraph.io",
-  "phabricator_uri" : "https://phabricator.memgraph.io",
-  "git.default-relative-commit": "origin/master",
-  "arc.land.onto.default": "master"
-}
.arclint (16 changed lines, file deleted)
@@ -1,16 +0,0 @@
-{
-  "linters": {
-    "clang-tidy": {
-      "type": "script-and-regex",
-      "include": "(\\.(cpp|cc|cxx|c|h|hpp|lcp)$)",
-      "script-and-regex.script": "./tools/arc-clang-tidy",
-      "script-and-regex.regex": "/^(?P<file>.*):(?P<line>\\d+):(?P<char>\\d+): (?P<severity>warning|error): (?P<message>.*)$/m"
-    },
-    "clang-format": {
-      "type": "script-and-regex",
-      "include": "(\\.(cpp|cc|cxx|c|h|hpp)$)",
-      "script-and-regex.script": "./tools/arc-clang-format",
-      "script-and-regex.regex": "/^(?P<severity>warning):(?P<offset>\\d+):(?P<message>.*)$/m"
-    }
-  }
-}
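Both removed linter entries use Arcanist's `script-and-regex` type, where the regex turns raw tool output into structured lint messages. As a hedged illustration of what the removed `clang-tidy` rule parsed, the same named-group pattern can be exercised with Python's `re` module; the sample path and diagnostic below are invented, not taken from the repository.

```python
import re

# Same named groups as the removed .arclint regex; the surrounding "/.../m"
# delimiters are an Arcanist/PHP convention, re.MULTILINE plays that role here.
DIAGNOSTIC_RE = re.compile(
    r"^(?P<file>.*):(?P<line>\d+):(?P<char>\d+): "
    r"(?P<severity>warning|error): (?P<message>.*)$",
    re.MULTILINE,
)

# Hypothetical clang-tidy output line, for illustration only.
sample = "src/query/interpreter.cpp:42:17: warning: variable 'x' is unused"

match = DIAGNOSTIC_RE.search(sample)
if match:
    print(match.groupdict())
    # {'file': 'src/query/interpreter.cpp', 'line': '42', 'char': '17',
    #  'severity': 'warning', 'message': "variable 'x' is unused"}
```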
.github/workflows/diff.yaml (6 changed lines)
@@ -1,6 +1,10 @@
 name: Diff
 
-on: [push]
+on:
+  push:
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
 
 jobs:
   community_build:
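This hunk implements the "Stop Actions if only docs files were updated" bullet: pushes that touch only `docs/**` or Markdown files no longer trigger the Diff workflow. A rough sketch of that decision follows, approximating the path patterns with `fnmatch`; the real matching is done by GitHub Actions itself, so treat this purely as an illustration.

```python
from fnmatch import fnmatch

# Approximation of the ignore patterns added to diff.yaml.
IGNORED_PATTERNS = ["docs/**", "*.md"]

def triggers_build(changed_paths):
    """Return True if at least one changed path is not covered by the ignore list."""
    return any(
        not any(fnmatch(path, pattern) for pattern in IGNORED_PATTERNS)
        for path in changed_paths
    )

print(triggers_build(["docs/dev/quick-start.md", "README.md"]))  # False: docs only
print(triggers_build(["src/query/interpreter.cpp"]))             # True: code changed
```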
@@ -1,172 +0,0 @@
-import os
-import os.path
-import fnmatch
-import logging
-import ycm_core
-
-BASE_FLAGS = [
-    '-Wall',
-    '-Wextra',
-    '-Werror',
-    '-Wno-long-long',
-    '-Wno-variadic-macros',
-    '-fexceptions',
-    '-ferror-limit=10000',
-    '-std=c++1z',
-    '-xc++',
-    '-I/usr/lib/',
-    '-I/usr/include/',
-    '-I./src',
-    '-I./include',
-    '-I./libs/fmt',
-    '-I./libs/yaml-cpp',
-    '-I./libs/glog/include',
-    '-I./libs/googletest/googletest/include',
-    '-I./libs/googletest/googlemock/include',
-    '-I./libs/benchmark/include',
-    '-I./libs/cereal/include',
-    # We include cppitertools headers directly from libs directory.
-    '-I./libs',
-    '-I./libs/rapidcheck/include',
-    '-I./libs/antlr4/runtime/Cpp/runtime/src',
-    '-I./libs/gflags/include',
-    '-I./experimental/distributed/src',
-    '-I./libs/postgresql/include',
-    '-I./libs/bzip2',
-    '-I./libs/zlib',
-    '-I./libs/rocksdb/include',
-    '-I./libs/librdkafka/include/librdkafka',
-    '-I./build/include'
-]
-
-SOURCE_EXTENSIONS = [
-    '.cpp',
-    '.cxx',
-    '.cc',
-    '.c',
-    '.m',
-    '.mm'
-]
-
-HEADER_EXTENSIONS = [
-    '.h',
-    '.hxx',
-    '.hpp',
-    '.hh'
-]
-
-# set the working directory of YCMD to be this file
-os.chdir(os.path.dirname(os.path.realpath(__file__)))
-
-def IsHeaderFile(filename):
-    extension = os.path.splitext(filename)[1]
-    return extension in HEADER_EXTENSIONS
-
-def GetCompilationInfoForFile(database, filename):
-    if IsHeaderFile(filename):
-        basename = os.path.splitext(filename)[0]
-        for extension in SOURCE_EXTENSIONS:
-            replacement_file = basename + extension
-            if os.path.exists(replacement_file):
-                compilation_info = database.GetCompilationInfoForFile(replacement_file)
-                if compilation_info.compiler_flags_:
-                    return compilation_info
-        return None
-    return database.GetCompilationInfoForFile(filename)
-
-def FindNearest(path, target):
-    candidate = os.path.join(path, target)
-    if(os.path.isfile(candidate) or os.path.isdir(candidate)):
-        logging.info("Found nearest " + target + " at " + candidate)
-        return candidate;
-    else:
-        parent = os.path.dirname(os.path.abspath(path));
-        if(parent == path):
-            raise RuntimeError("Could not find " + target);
-        return FindNearest(parent, target)
-
-def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
-    if not working_directory:
-        return list(flags)
-    new_flags = []
-    make_next_absolute = False
-    path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
-    for flag in flags:
-        new_flag = flag
-
-        if make_next_absolute:
-            make_next_absolute = False
-            if not flag.startswith('/'):
-                new_flag = os.path.join(working_directory, flag)
-
-        for path_flag in path_flags:
-            if flag == path_flag:
-                make_next_absolute = True
-                break
-
-            if flag.startswith(path_flag):
-                path = flag[ len(path_flag): ]
-                new_flag = path_flag + os.path.join(working_directory, path)
-                break
-
-        if new_flag:
-            new_flags.append(new_flag)
-    return new_flags
-
-
-def FlagsForClangComplete(root):
-    try:
-        clang_complete_path = FindNearest(root, '.clang_complete')
-        clang_complete_flags = open(clang_complete_path, 'r').read().splitlines()
-        return clang_complete_flags
-    except:
-        return None
-
-def FlagsForInclude(root):
-    try:
-        include_path = FindNearest(root, 'include')
-        flags = []
-        for dirroot, dirnames, filenames in os.walk(include_path):
-            for dir_path in dirnames:
-                real_path = os.path.join(dirroot, dir_path)
-                flags = flags + ["-I" + real_path]
-        return flags
-    except:
-        return None
-
-def FlagsForCompilationDatabase(root, filename):
-    try:
-        compilation_db_path = FindNearest(root, 'compile_commands.json')
-        compilation_db_dir = os.path.dirname(compilation_db_path)
-        logging.info("Set compilation database directory to " + compilation_db_dir)
-        compilation_db = ycm_core.CompilationDatabase(compilation_db_dir)
-        if not compilation_db:
-            logging.info("Compilation database file found but unable to load")
-            return None
-        compilation_info = GetCompilationInfoForFile(compilation_db, filename)
-        if not compilation_info:
-            logging.info("No compilation info for " + filename + " in compilation database")
-            return None
-        return MakeRelativePathsInFlagsAbsolute(
-            compilation_info.compiler_flags_,
-            compilation_info.compiler_working_dir_)
-    except:
-        return None
-
-def FlagsForFile(filename):
-    root = os.path.realpath(filename);
-    compilation_db_flags = FlagsForCompilationDatabase(root, filename)
-    if compilation_db_flags:
-        final_flags = compilation_db_flags
-    else:
-        final_flags = BASE_FLAGS
-        clang_flags = FlagsForClangComplete(root)
-        if clang_flags:
-            final_flags = final_flags + clang_flags
-        include_flags = FlagsForInclude(root)
-        if include_flags:
-            final_flags = final_flags + include_flags
-    return {
-        'flags': final_flags,
-        'do_cache': True
-    }
CODEOWNERS (4 changed lines, new file)
@@ -0,0 +1,4 @@
+/docs/ @gitbuda
+/src/communication/ @antonio2368
+/src/query/ @the-joksim
+/src/storage/ @antonio2368
Doxyfile (2 changed lines)
@@ -51,7 +51,7 @@ PROJECT_BRIEF = "The World's Most Powerful Graph Database"
 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
 # the logo to the output directory.
 
-PROJECT_LOGO = Doxylogo.png
+PROJECT_LOGO = docs/doxygen/memgraph_logo.png
 
 # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
 # into which the generated documentation will be written. If a relative path is
README.md (55 changed lines)
@@ -1,24 +1,47 @@
-# memgraph
+# Memgraph
 
 Memgraph is an ACID compliant high performance transactional distributed
 in-memory graph database featuring runtime native query compiling, lock free
 data structures, multi-version concurrency control and asynchronous IO.
 
-## dependencies
+## Development Documentation
 
-Memgraph can be compiled using any modern c++ compiler. It mostly relies on
-the standard template library, however, some things do require external
-libraries.
+* [Quick Start](docs/dev/quick-start.md)
+* [Workflow](docs/dev/workflow.md)
+* [Storage](docs/dev/storage/v2/contents.md)
+* [Query Engine](docs/dev/query/contents.md)
+* [Communication](docs/dev/communication/contents.md)
+* [Lisp C++ Preprocessor (LCP)](docs/dev/lcp.md)
 
-Some code contains linux-specific libraries and the build is only supported
-on a 64 bit linux kernel.
+## Feature Specifications
 
-* linux
-* clang 3.8 (good c++11 support, especially lock free atomics)
-* antlr (compiler frontend)
-* cppitertools
-* fmt format
-* google benchmark
-* google test
-* glog
-* gflags
+Each prominent Memgraph feature requires a feature specification. The purpose
+of the feature specification is to have a base for discussing all aspects of
+the feature. Elements of feature specifications should be:
+
+* High-level context.
+* Interface.
+* User stories. Usage from the end-user perspective. In the case of a library,
+  that should be cases on how to use the programming interface. In the case of
+  a shell script, that should be cases on how to use flags.
+* Discussion about concurrency, memory management, error management.
+* Any other essential functional or non-functional requirements.
+* Test and benchmark strategy.
+* Possible future changes/improvements/extensions.
+* Security concerns.
+* Additional and/or optional implementation details.
+
+It's crucial to keep feature spec up-to-date with the implementation. Take a
+look at the list of [feature specifications](docs/feature_spec/contents.md) to
+learn more about powerful Memgraph features.
+
+## User Documentation
+
+Memgraph user documentation is maintained within
+[docs](https://github.com/memgraph/docs) repository. The documentation is also
+available on [GitBook](https://docs.memgraph.com).
+
+## Licences
+
+* [Community](release/LICENSE_COMMUNITY.md)
+* [Enterprise](release/LICENSE_ENTERPRISE.md)
docs/dev/communication/contents.md (5 changed lines, new file)
@@ -0,0 +1,5 @@
+# Communication
+
+## Bolt
+
+Memgraph implements [Bolt communication protocol](https://7687.org/).
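The new page only states that Memgraph speaks the Bolt protocol, so a minimal connectivity sketch may be useful. It assumes a locally running Memgraph on the default `bolt://localhost:7687` with no authentication configured, and uses the `neo4j` Python driver as one example of a Bolt-compatible client; any other Bolt client would work the same way.

```python
from neo4j import GraphDatabase

# Assumed defaults: local Memgraph instance, Bolt port 7687, no authentication.
driver = GraphDatabase.driver("bolt://localhost:7687", auth=("", ""))

with driver.session() as session:
    # Create a node and read it back over the Bolt protocol.
    session.run("CREATE (:Person {name: $name})", name="Alice")
    result = session.run("MATCH (p:Person) RETURN p.name AS name")
    for record in result:
        print(record["name"])

driver.close()
```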
@@ -1,22 +0,0 @@
-# Snapshots
-
-A "snapshot" is a record of the current database state stored in permanent
-storage. Note that the term "snapshot" is used also in the context of
-the transaction engine to denote a set of running transactions.
-
-A snapshot is written to the file by Memgraph periodically if so
-configured. The snapshot creation process is done within a transaction created
-specifically for that purpose. The transaction is needed to ensure that
-the stored state is internally consistent.
-
-The database state can be recovered from the snapshot during startup, if
-so configured. This recovery works in conjunction with write-ahead log
-recovery.
-
-A single snapshot contains all the data needed to recover a database. In
-that sense snapshots are independent of each other and old snapshots can
-be deleted once the new ones are safely stored, if it is not necessary
-to revert the database to some older state.
-
-The exact format of the snapshot file is defined inline in the snapshot
-creation code.
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-mkdir -p $script_dir/html
-
-for markdown_file in $(find $script_dir -name '*.md'); do
-  name=$(basename -s .md $markdown_file)
-  sed -e 's/.md/.html/' $markdown_file | \
-    pandoc -s -f markdown -t html -o $script_dir/html/$name.html
-done
-
-for dot_file in $(find $script_dir -name '*.dot'); do
-  name=$(basename -s .dot $dot_file)
-  dot -Tpng $dot_file -o $script_dir/html/$name.png
-done
docs/dev/query/interpreter-class.png (binary, new file, 55 KiB; image not shown)
@@ -17,7 +17,7 @@ On ArchLinux or Gentoo, you probably already know what to do.
 After installing `git`, you are now ready to fetch your own copy of Memgraph
 source code. Run the following command:
 
-    git clone https://phabricator.memgraph.io/diffusion/MG/memgraph.git
+    git clone https://github.com/memgraph/memgraph.git
 
 The above will create a `memgraph` directory and put all source code there.
 
@@ -52,12 +52,13 @@ After installing the packages, issue the following commands:
 ### Compiling
 
 Memgraph is compiled using our own custom toolchain that can be obtained from
-[Toolchain repository](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain). You should read
-the `README.txt` file in the repository and install the apropriate toolchain
-for your distribution. After you have installed the toolchain you should read
-the instructions for the toolchain in the toolchain install directory
-(`/opt/toolchain-vXYZ/README.md`) and install dependencies that are necessary
-to run the toolchain.
+[Toolchain
+repository](https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/toolchain). You
+should read the `README.txt` file in the repository and install the apropriate
+toolchain for your distribution. After you have installed the toolchain you
+should read the instructions for the toolchain in the toolchain install
+directory (`/opt/toolchain-vXYZ/README.md`) and install dependencies that are
+necessary to run the toolchain.
 
 When you want to compile Memgraph you should activate the toolchain using the
 prepared toolchain activation script that is also described in the toolchain
@@ -38,7 +38,7 @@ interpretation code) should work with accessors. There is a
 `RecordAccessor` as a base class for `VertexAccessor` and
 `EdgeAccessor`. Following is an enumeration of their purpose.
 
-### Data access
+### Data Access
 
 The client interacts with Memgraph using the Cypher query language. That
 language has certain semantics which imply that multiple versions of the
@@ -75,7 +75,7 @@ In distributed Memgraph accessors also contain a lot of the remote graph
 element handling logic. More info on that is available in the
 documentation for distributed.
 
-### Deferred MVCC data lookup for Edges
+### Deferred MVCC Data Lookup for Edges
 
 Vertices and edges are versioned using MVCC. This means that for each
 transaction an MVCC lookup needs to be done to determine which version
@@ -100,7 +100,7 @@ working with that data directly whenever possible! Always consider the
 accessors to be the first go-to for interacting with data, especially
 when in the context of a transaction.
 
-# Skiplist accessor
+# Skiplist Accessor
 
 The term "accessor" is also used in the context of a skiplist. Every
 operation on a skiplist must be performed within on an
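The second hunk above touches the section explaining why accessors defer MVCC lookups: each transaction must resolve which version of a record it is allowed to see. A toy sketch of that visibility rule follows, using a deliberately simplified model in which every version only records the transaction that created it; the class and field names are invented for illustration and omit the commit-status bookkeeping a real MVCC engine needs.

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class RecordVersion:
    created_by_tx: int                        # id of the transaction that wrote this version
    value: dict                               # simplified record payload
    older: Optional["RecordVersion"] = None   # previous version in the chain

def visible_version(newest: RecordVersion, reader_tx: int) -> Optional[RecordVersion]:
    """Walk from the newest version toward older ones and return the first one
    created by a transaction this reader is allowed to observe."""
    version = newest
    while version is not None:
        if version.created_by_tx <= reader_tx:
            return version
        version = version.older
    return None

# Example: a vertex with two versions; a reader from transaction 5 sees the older one.
v1 = RecordVersion(created_by_tx=3, value={"name": "old"})
v2 = RecordVersion(created_by_tx=8, value={"name": "new"}, older=v1)
print(visible_version(v2, reader_tx=5).value)   # {'name': 'old'}
```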
docs/dev/storage/v1/contents.md (6 changed lines, new file)
@@ -0,0 +1,6 @@
+# Storage v1
+
+* [Accessors](accessors.md)
+* [Indexes](indexes.md)
+* [Property Storage](property-storage.md)
+* [Durability](durability.md)
@@ -1,4 +1,6 @@
-# Write-ahead logging
+# Durability
 
+## Write-ahead Logging
+
 Typically WAL denotes the process of writing a "log" of database
 operations (state changes) to persistent storage before committing the
@@ -53,3 +55,26 @@ inefficient) to recover the database from WAL only, provided all the WAL
 files created from DB start are available. It is not possible to recover
 partial database state (i.e. from some suffix of WAL files, without the
 preceding snapshot).
+
+## Snapshots
+
+A "snapshot" is a record of the current database state stored in permanent
+storage. Note that the term "snapshot" is used also in the context of
+the transaction engine to denote a set of running transactions.
+
+A snapshot is written to the file by Memgraph periodically if so
+configured. The snapshot creation process is done within a transaction created
+specifically for that purpose. The transaction is needed to ensure that
+the stored state is internally consistent.
+
+The database state can be recovered from the snapshot during startup, if
+so configured. This recovery works in conjunction with write-ahead log
+recovery.
+
+A single snapshot contains all the data needed to recover a database. In
+that sense snapshots are independent of each other and old snapshots can
+be deleted once the new ones are safely stored, if it is not necessary
+to revert the database to some older state.
+
+The exact format of the snapshot file is defined inline in the snapshot
+creation code.
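The Snapshots text moved here describes recovery as "snapshot first, then write-ahead log". A compact sketch of that recovery order is shown below, with an invented JSON-based on-disk layout used purely for illustration; Memgraph's actual snapshot and WAL formats are defined in its C++ sources.

```python
import json
from pathlib import Path

def recover(data_dir: str) -> dict:
    """Rebuild an in-memory key/value state: load the newest snapshot,
    then replay every WAL entry written after it."""
    base = Path(data_dir)
    state, last_applied = {}, -1

    # 1. Load the most recent snapshot, if any (the file name encodes its position).
    snapshots = sorted(base.glob("snapshot-*.json"))
    if snapshots:
        payload = json.loads(snapshots[-1].read_text())
        state, last_applied = payload["state"], payload["last_applied"]

    # 2. Replay WAL records that come after the snapshot position.
    wal_path = base / "wal.log"
    if wal_path.exists():
        for line in wal_path.read_text().splitlines():
            op = json.loads(line)
            if op["seq"] <= last_applied:
                continue  # already covered by the snapshot
            if op["kind"] == "set":
                state[op["key"]] = op["value"]
            elif op["kind"] == "delete":
                state.pop(op["key"], None)
    return state
```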
@@ -1,10 +1,10 @@
-# Label indexes
+# Label Indexes
 
 These are unsorted indexes that contain all the vertices that have the label
 the indexes are for (one index per label). These kinds of indexes get
 automatically generated for each label used in the database.
 
-### Updating the indexes
+### Updating the Indexes
 
 Whenever something gets added to the record we update the index (add that
 record to index). We keep an index which might contain garbage (not relevant
@@ -64,7 +64,7 @@ same order, with (record, vlist) pair
 already superseded by a newer record and as such won't be inserted while
 it's being deleted
 
-### Querying the index
+### Querying the Index
 
 We run through the index for the given label and do `vlist.find` operation for
 the current transaction, and check if the newest return record has that
@@ -75,7 +75,7 @@ in the index are sorted by their `vlist*` and as such we can filter consecutive
 duplicate `vlist*` to only return one of those while still being able to create
 an iterator to index.
 
-### Cleaning the index
+### Cleaning the Index
 
 Cleaning the index is not as straightforward as it seems as a lot of garbage
 can accumulate, but it's hard to know when exactly can we delete some (record,
@@ -1,7 +1,7 @@
-# Property storage
+# Property Storage
 
 Although the reader is probably familiar with properties in *Memgraph*, let's
 briefly recap.
 
 Both vertices and edges can store an arbitrary number of properties. Properties
 are, in essence, ordered pairs of property names and property values. Each
@@ -21,9 +21,9 @@ must be one of the following types:
 
 Property values are modeled in a class conveniently called `PropertyValue`.
 
-## Mapping between property names and property keys.
+## Mapping Between Property Names and Property Keys.
 
 Although users think of property names in terms of descriptive strings
 (e.g. "location" or "department"), *Memgraph* internally converts those names
 into property keys which are, essentially, unsigned 16-bit integers.
 
@@ -49,7 +49,7 @@ An interface of `PropertyValueStore` is as follows:
 `clear` | Clears the storage.
 `iterator`| Provides an extension of `std::input_iterator` that iterates over storage.
 
-## Storage location
+## Storage Location
 
 By default, *Memgraph* is an in-memory database and all properties are therefore
 stored in working memory unless specified otherwise by the user. User has an
@@ -63,12 +63,12 @@ property key has the following format:
 ```
 |---location--|------id------|
 |-Memory|Disk-|-----2^15-----|
 ```
 
 In other words, the most significant bit determines the location where the
 property will be stored.
 
-### In-memory storage
+### In-memory Storage
 
 The underlying implementation of in-memory storage for the time being is
 `std::vector<std::pair<Property, PropertyValue>>`. Implementations of`at`, `set`
@@ -76,7 +76,7 @@ and `erase` are linear in time. This implementation is arguably more efficient
 than `std::map` or `std::unordered_map` when the average number of properties of
 a record is relatively small (up to 10) which seems to be the case.
 
-### On-disk storage
+### On-disk Storage
 
 #### KVStore
 
@@ -120,12 +120,12 @@ KVStore storage = ...;
 for (auto it = storage.begin("alpha"); it != storage.end("omega"); ++it) {}
 ```
 
-#### Data organization on disk
+#### Data Organization on Disk
 
 Each `PropertyValueStore` instance can access a static `KVStore` object that can
 store `(key, value)` pairs on disk. The key of each property on disk consists of
 two parts — a unique identifier (unsigned 64-bit integer) of the current
 record version (see mvcc docummentation for further clarification) and a
 property key as described above. The actual value of the property is serialized
 into a bytestring using bolt `BaseEncoder`. Similarly, deserialization is
 performed by bolt `Decoder`.
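The hunks above keep the explanation that a property key is an unsigned 16-bit integer whose most significant bit selects memory versus disk storage, and that the on-disk key prepends a 64-bit record-version identifier. A small sketch of that bit layout follows; the helper names are invented for illustration and the byte order is an assumption, not taken from Memgraph's serialization code.

```python
import struct

DISK_BIT = 1 << 15          # most significant bit of the 16-bit property key

def make_property_key(prop_id: int, on_disk: bool) -> int:
    """Pack a property id (< 2^15) and a location flag into one 16-bit key."""
    assert 0 <= prop_id < (1 << 15)
    return prop_id | (DISK_BIT if on_disk else 0)

def split_property_key(key: int):
    return ("disk" if key & DISK_BIT else "memory", key & (DISK_BIT - 1))

def disk_key(version_id: int, prop_key: int) -> bytes:
    """On-disk key: 64-bit record-version id followed by the 16-bit property key."""
    return struct.pack(">QH", version_id, prop_key)

key = make_property_key(42, on_disk=True)
print(split_property_key(key))   # ('disk', 42)
print(disk_key(7, key).hex())    # 0000000000000007802a
```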
docs/dev/storage/v2/contents.md (3 changed lines, new file)
@@ -0,0 +1,3 @@
+# Storage v2
+
+TODO(gitbuda): Write documentation.
@@ -1,152 +0,0 @@
-# Bootstrapping Compilation Toolchain for Memgraph
-
-Requirements:
-
-* libstdc++ shipped with gcc-6.3 or gcc-6.4
-* cmake >= 3.1, Debian Stretch uses cmake-3.7.2
-* clang-3.9
-
-## Installing gcc-6.4
-
-gcc-6.3 has a bug, so use the 6.4 version which is just a bugfix release.
-
-Requirements on CentOS 7:
-
-* wget
-* make
-* gcc (bootstrap)
-* gcc-c++ (bootstrap)
-* gmp-devel (bootstrap)
-* mpfr-devel (bootstrap)
-* libmpc-devel (bootstrap)
-* zip
-* perl
-* dejagnu (testing)
-* expect (testing)
-* tcl (testing)
-
-```
-wget ftp://ftp.mpi-sb.mpg.de/pub/gnu/mirror/gcc.gnu.org/pub/gcc/releases/gcc-6.4.0/gcc-6.4.0.tar.gz
-tar xf gcc-6.4.0.tar.gz
-cd gcc-6.4.0
-mkdir build
-cd build
-../configure --disable-multilib --prefix=<install-dst>
-make
-# Testing
-make -k check
-make install
-```
-
-*Do not put gcc + libs on PATH* (unless you know what you are doing).
-
-## Installing cmake-3.7.2
-
-Requirements on CentOS 7:
-
-* wget
-* make
-* gcc
-* gcc-c++
-* ncurses-devel (optional, for ccmake)
-
-```
-wget https://cmake.org/files/v3.7/cmake-3.7.2.tar.gz
-tar xf cmake-3.7.2.tar.gz
-cd cmake-3.7.2.tar.gz
-./bootstrap --prefix<install-dst>
-make
-make install
-```
-
-Put cmake on PATH (if appropriate)
-
-**Fix the bug in CpackRPM**
-
-`"<path-to-cmake>/share/cmake-3.7/Modules/CPackRPM.cmake" line 2273 of 2442`
-
-The line
-
-```
-set(RPMBUILD_FLAGS "-bb")
-```
-needs to be before
-
-```
-if(CPACK_RPM_GENERATE_USER_BINARY_SPECFILE_TEMPLATE OR NOT CPACK_RPM_USER_BINARY_SPECFILE)
-```
-
-It was probably accidentally placed after, and is fixed in later cmake
-releases.
-
-## Installing clang-3.9
-
-Requirements on CentOS 7:
-
-* wget
-* make
-* cmake
-
-```
-wget http://releases.llvm.org/3.9.1/llvm-3.9.1.src.tar.xz
-tar xf llvm-3.9.1.src.tar.xz
-mv llvm-3.9.1.src llvm
-
-wget http://releases.llvm.org/3.9.1/cfe-3.9.1.src.tar.xz
-tar xf cfe-3.9.1.src.tar.xz
-mv cfe-3.9.1.src llvm/tools/clang
-
-cd llvm
-mkdir build
-cd build
-cmake -DCMAKE_BUILD_TYPE="Release" -DGCC_INSTALL_PREFIX=<gcc-dir> \
-      -DCMAKE_C_COMPILER=<gcc> -DCMAKE_CXX_COMPILER=<g++> \
-      -DCMAKE_CXX_LINK_FLAGS="-L<gcc-dir>/lib64 -Wl,-rpath,<gcc-dir>/lib64" \
-      -DCMAKE_INSTALL_PREFIX=<install-dst> ..
-make
-# Testing
-make check-clang
-make install
-```
-
-Put clang on PATH (if appropriate)
-
-## Memgraph
-
-Requirements on CentOS 7:
-
-* libuuid-devel (antlr4)
-* java-1.8.0-openjdk (antlr4)
-* boost-static (too low version --- compile manually)
-* rpm-build (RPM)
-* python3 (tests, ...)
-* which (required for rocksdb)
-* sbcl (lisp C++ preprocessing)
-
-### Boost 1.62
-
-```
-wget https://netix.dl.sourceforge.net/project/boost/boost/1.62.0/boost_1_62_0.tar.gz
-tar xf boost_1_62_0.tar.gz
-cd boost_1_62_0
-./bootstrap.sh --with-toolset=clang --with-libraries=iostreams,serialization --prefix=<install-dst>
-./b2
-# Default installs to /usr/local/
-./b2 install
-```
-
-### Building Memgraph
-
-clang is *required* to be findable by cmake, i.e. it should be on PATH.
-cmake isn't required to be on the path, since you run it manually, so can use
-the full path to executable in order to run it. Obviously, it is convenient to
-put cmake also on PATH.
-
-Building is done as explained in [Quick Start](quick-start.md), but each
-`make` invocation needs to be prepended with:
-
-`LD_RUN_PATH=<gcc-dir>/lib64 make ...`
-
-### RPM
-
-Name format: `memgraph-<version>-<pkg-version>.<arch>.rpm`
@@ -20,18 +20,12 @@ Finally, make git aware of your favourite editor:
 
     git config --global core.editor "vim"
 
-## Phabricator
+## Github
 
 All of the code in Memgraph needs to go through code review before it can be
-accepted in the codebase. This is done through
-[Phabricator](https://phacility.com/phabricator/). The command line tool for
-interfacing with Phabricator is
-[arcanist](https://phacility.com/phabricator/arcanist/). You should already
-have it installed if you followed the steps in [Quick Start](quick-start.md).
-
-The only required setup is to go in the root of Memgraph's project and run:
-
-    arc install-certificate
+accepted in the codebase. This is done through [Github](https://github.com/).
+You should already have it installed if you followed the steps in [Quick
+Start](quick-start.md).
 
 ## Working on Your Feature Branch
 
@@ -46,6 +40,7 @@ out of the `master` branch. For example, let's say you are adding static type
 checking to the query language compiler. You would create a branch called
 `mg_query_static_typing` with the following command:
 
+    # TODO(gitbuda): Discuss the naming conventions.
     git branch mg_query_static_typing
 
 To switch to that branch, type:
@@ -98,34 +93,9 @@ possible.
 ### Sending Changes on a Review
 
 After finishing your work on your feature branch, you will want to send it on
-code review. This is done through Arcanist. To do that, run the following
-command:
-
-    arc diff
-
-You will, once again, be presented with an editor where you need to describe
-your whole work. `arc` will by default fill that description with your commit
-messages. The title and summary of your work should also follow the
-conventions of git messages as described above. If you followed the
-guidelines, the message filled by `arc` should be fine.
-
-In addition to the message, you need to fill the `Reviewers:` line with
-usernames of people who should do the code review.
-
-You changes will be visible on Phabricator as a so called "diff". You can find
-the default view of active diffs
-[here](https://phabricator.memgraph.io/differential/)
-
-### Updating Changes Based on Review
-
-When you get comments in the code review, you will want to make additional
-modifications to your work. The same workflow as before applies: [Making and
-Committing Changes](#making-and-committing-changes)
-
-After making those changes, send them back on code review:
-
-    arc diff
-
+code review. This is done by pushing the branch to Github and creating a pull
+request. You can find all PRs
+[here](https://github.com/memgraph/memgraph/pulls).
 
 ### Updating From New Master
 
@@ -152,26 +122,3 @@ your feature branch as if you just created and started working on that branch.
 You may continue with the usual workflow of [Making and Committing
 Changes](#making-and-committing-changes) and [Sending Changes on a
 Review](#sending-changes-on-a-review).
-
-### Sending Your Changes on Master Branch
-
-When your changes pass the code review, you are ready to integrate them in the
-`master` branch. To do that, run the following command:
-
-    arc land
-
-Arcanist will take care of obtaining the latest changes from `master` and
-merging your changes on top. If the `land` was successful, Arcanist will
-delete your local branch and you will be back on `master`. Continuing from the
-examples above, the deleted branch would be `mg_query_static_typing`.
-
-This marks the completion of your changes, and you are ready to work on
-something else.
-
-### Note For People Familiar With Git
-
-Since Arcanist takes care of merging your git commits and pushing them on
-`master`, you should *never* have to call `git merge` and `git push`. If you
-find yourself typing those commands, check that you are doing the right thing.
-The most common mistake is to use `git merge` instead of `git rebase` for the
-case described in [Updating From New Master](#updating-from-new-master).
@@ -1,7 +1,6 @@
 # Memgraph Code Documentation
 
-IMPORTANT: auto-generated (run doxygen Doxyfile in the project root)
+IMPORTANT: Auto-generated (run doxygen Doxyfile in the project root).
 
-* HTML - just open docs/doxygen/html/index.html
-* Latex - run make inside docs/doxygen/latex
-
+* HTML - Open docs/doxygen/html/index.html.
+* Latex - Run make inside docs/doxygen/latex.

Binary image file (6.6 KiB before and after; image not shown).
docs/feature_spec/contents.md (3 changed lines, new file)
@@ -0,0 +1,3 @@
+# Feature Specifications
+
+* [Python Query Modules](python-query-modules.md)
@@ -23,12 +23,12 @@ scenarios:
 
 To release a new major.minor release of Memgraph you should execute the
 following steps:
-1. Land all diffs that must be in the new release
-2. Document all changes in `CHANGELOG.md` and land them
+1. Merge all PRs that must be in the new release
+2. Document all changes in `CHANGELOG.md` and merge them
 3. From the `master` branch, create a branch named `release/X.Y` and push it
    to `origin`
-4. Create the release packages triggering a `mg-master-release-branch-test`
-   using branch `release/X.Y` on Apollo
+4. Create the release packages triggering a `Release {{Operating System}}`
+   workflow using branch `release/X.Y` on Github Actions
 5. Enjoy
 
 To release a new patch release in an existing major.minor series you should
@@ -38,6 +38,6 @@ execute the following steps:
 3. Document all changes in `CHANGELOG.md` and commit them
 4. Edit the root `CMakeLists.txt` and set `MEMGRAPH_OVERRIDE_VERSION` to
    `X.Y.patch` and commit the change
-5. Create the release packages triggering a `mg-master-release-branch-test`
-   using branch `release/X.Y` on Apollo
+5. Create the release packages triggering a `Release {{Operating System}}`
+   workflow using branch `release/X.Y` on Github Actions
 6. Enjoy